From 3c14cc187525fde6cc24fd5e3acec18c44ce9a1d Mon Sep 17 00:00:00 2001
From: Marc Handalian
Date: Fri, 8 Dec 2023 12:14:15 -0800
Subject: [PATCH 001/178] Bump jetty version in hdfs-fixture to
 9.4.53.v20231009 (#11539)

* Bump jetty version in hdfs-fixture to 9.4.53.v20231009

Signed-off-by: Marc Handalian

* fix changelog

Signed-off-by: Marc Handalian

---------

Signed-off-by: Marc Handalian
---
 CHANGELOG.md                            | 1 +
 test/fixtures/hdfs-fixture/build.gradle | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index bec5e0a6845a9..67d3bb0c45398 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -148,6 +148,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445))
 - Bump `org.apache.xmlbeans:xmlbeans` from 5.1.1 to 5.2.0 ([#11448](https://github.com/opensearch-project/OpenSearch/pull/11448))
 - Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521))
+- Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))

 ### Changed
 - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))

diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle
index 8247203e0ee8e..31f871236a322 100644
--- a/test/fixtures/hdfs-fixture/build.gradle
+++ b/test/fixtures/hdfs-fixture/build.gradle
@@ -33,7 +33,7 @@ apply plugin: 'opensearch.java'
 group = 'hdfs'

 versions << [
-  'jetty': '9.4.52.v20230823'
+  'jetty': '9.4.53.v20231009'
 ]

 dependencies {

From de61a2e542cc4d61cbe5307ba85609cd25ab2c41 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 11 Dec 2023 09:28:53 -0500
Subject: [PATCH 002/178] Bump commons-io:commons-io from 2.15.0 to 2.15.1 in
 /plugins/discovery-azure-classic (#11554)

* Bump commons-io:commons-io in /plugins/discovery-azure-classic

Bumps commons-io:commons-io from 2.15.0 to 2.15.1.

---
updated-dependencies:
- dependency-name: commons-io:commons-io
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] <support@github.com>

* Updating SHAs

Signed-off-by: dependabot[bot]

* Update changelog

Signed-off-by: dependabot[bot]

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot]
---
 CHANGELOG.md                                                    | 2 +-
 plugins/discovery-azure-classic/build.gradle                    | 2 +-
 .../discovery-azure-classic/licenses/commons-io-2.15.0.jar.sha1 | 1 -
 .../discovery-azure-classic/licenses/commons-io-2.15.1.jar.sha1 | 1 +
 4 files changed, 3 insertions(+), 3 deletions(-)
 delete mode 100644 plugins/discovery-azure-classic/licenses/commons-io-2.15.0.jar.sha1
 create mode 100644 plugins/discovery-azure-classic/licenses/commons-io-2.15.1.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 67d3bb0c45398..ab5ba3d0a821f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -123,7 +123,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),

 ### Dependencies
 - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))
-- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446))
+- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554))
 - Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298))
 - Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.5.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295))
 - Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506))

diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle
index 16f2d2c5f23c6..c3d70e9c64968 100644
--- a/plugins/discovery-azure-classic/build.gradle
+++ b/plugins/discovery-azure-classic/build.gradle
@@ -53,7 +53,7 @@ dependencies {
   api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
   api "commons-codec:commons-codec:${versions.commonscodec}"
   api "commons-lang:commons-lang:2.6"
-  api "commons-io:commons-io:2.15.0"
+  api "commons-io:commons-io:2.15.1"
   api 'javax.mail:mail:1.4.7'
   api 'javax.inject:javax.inject:1'
   api "com.sun.jersey:jersey-client:${versions.jersey}"

diff --git a/plugins/discovery-azure-classic/licenses/commons-io-2.15.0.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-io-2.15.0.jar.sha1
deleted file mode 100644
index 73709383fd130..0000000000000
--- a/plugins/discovery-azure-classic/licenses/commons-io-2.15.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5c3c2db10f6f797430a7f9c696b4d1273768c924
\ No newline at end of file

diff --git a/plugins/discovery-azure-classic/licenses/commons-io-2.15.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-io-2.15.1.jar.sha1
new file mode 100644
index 0000000000000..47c5d13812a36
--- /dev/null
+++ b/plugins/discovery-azure-classic/licenses/commons-io-2.15.1.jar.sha1
@@ -0,0 +1 @@
+f11560da189ab563a5c8e351941415430e9304ea
\ No newline at end of file

From 1e72603fe9028239385c94506b1a0a6176a0405b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 11 Dec 2023 09:30:10 -0500
Subject: [PATCH 003/178] Bump org.wiremock:wiremock-standalone from 3.1.0 to
 3.3.1 (#11555)

* Bump org.wiremock:wiremock-standalone from 3.1.0 to 3.3.1

Bumps [org.wiremock:wiremock-standalone](https://github.com/wiremock/wiremock) from 3.1.0 to 3.3.1.
- [Release notes](https://github.com/wiremock/wiremock/releases)
- [Commits](https://github.com/wiremock/wiremock/compare/3.1.0...3.3.1)

---
updated-dependencies:
- dependency-name: org.wiremock:wiremock-standalone
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]

* Update changelog

Signed-off-by: dependabot[bot]

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot]
---
 CHANGELOG.md          | 1 +
 buildSrc/build.gradle | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ab5ba3d0a821f..8e5cb93f2f54b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -149,6 +149,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `org.apache.xmlbeans:xmlbeans` from 5.1.1 to 5.2.0 ([#11448](https://github.com/opensearch-project/OpenSearch/pull/11448))
 - Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521))
 - Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))
+- Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555))

 ### Changed
 - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))

diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index b8db8504d5b85..34e94eac1826f 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -128,7 +128,7 @@ dependencies {
   testFixturesApi "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
   testFixturesApi gradleApi()
   testFixturesApi gradleTestKit()
-  testImplementation 'org.wiremock:wiremock-standalone:3.1.0'
+  testImplementation 'org.wiremock:wiremock-standalone:3.3.1'
   testImplementation "org.mockito:mockito-core:${props.getProperty('mockito')}"
   integTestImplementation('org.spockframework:spock-core:2.3-groovy-3.0') {
     exclude module: "groovy"

From 00dd577a0211cb7f1bcb75291ed1424cda215b64 Mon Sep 17 00:00:00 2001
From: Andriy Redko
Date: Mon, 11 Dec 2023 09:52:19 -0500
Subject: [PATCH 004/178] Fix the issue with DefaultSpanScope restoring wrong
 span in the TracerContextStorage upon detach (#11316)

Signed-off-by: Andriy Redko
---
 CHANGELOG.md                                  |  1 +
 .../telemetry/tracing/DefaultSpanScope.java   | 16 +++-
 ...ContextBasedTracerContextStorageTests.java | 81 +++++++++++++++++++
 3 files changed, 94 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8e5cb93f2f54b..8271d1655de04 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -185,6 +185,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Delegating CachingWeightWrapper#count to internal weight object ([#10543](https://github.com/opensearch-project/OpenSearch/pull/10543))
 - Fix per request latency last phase not tracked ([#10934](https://github.com/opensearch-project/OpenSearch/pull/10934))
 - Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152))
+- Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316))
 - Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369))
 - Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167))
 - Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238))

diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
index decbf49f795c4..93600da510977 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/tracing/DefaultSpanScope.java
@@ -21,6 +21,7 @@ class DefaultSpanScope implements SpanScope {
     private final Span span;
     private final SpanScope previousSpanScope;
+    private final Span beforeSpan;
     private static final ThreadLocal spanScopeThreadLocal = new ThreadLocal<>();
     private final TracerContextStorage tracerContextStorage;

@@ -29,8 +30,14 @@ class DefaultSpanScope implements SpanScope {
      * @param span span
      * @param previousSpanScope before attached span scope.
      */
-    private DefaultSpanScope(Span span, SpanScope previousSpanScope, TracerContextStorage tracerContextStorage) {
+    private DefaultSpanScope(
+        Span span,
+        final Span beforeSpan,
+        SpanScope previousSpanScope,
+        TracerContextStorage tracerContextStorage
+    ) {
         this.span = Objects.requireNonNull(span);
+        this.beforeSpan = beforeSpan;
         this.previousSpanScope = previousSpanScope;
         this.tracerContextStorage = tracerContextStorage;
     }
@@ -43,7 +50,8 @@ private DefaultSpanScope(Span span, SpanScope previousSpanScope, TracerContextSt
      */
     public static SpanScope create(Span span, TracerContextStorage tracerContextStorage) {
         final SpanScope beforeSpanScope = spanScopeThreadLocal.get();
-        SpanScope newSpanScope = new DefaultSpanScope(span, beforeSpanScope, tracerContextStorage);
+        final Span beforeSpan = tracerContextStorage.get(TracerContextStorage.CURRENT_SPAN);
+        SpanScope newSpanScope = new DefaultSpanScope(span, beforeSpan, beforeSpanScope, tracerContextStorage);
         return newSpanScope;
     }
@@ -61,8 +69,8 @@ public SpanScope attach() {

     private void detach() {
         spanScopeThreadLocal.set(previousSpanScope);
-        if (previousSpanScope != null) {
-            tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, previousSpanScope.getSpan());
+        if (beforeSpan != null) {
+            tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, beforeSpan);
         } else {
             tracerContextStorage.put(TracerContextStorage.CURRENT_SPAN, null);
         }

diff --git a/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java b/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java
index 3a98a67b53920..ee816aa5f596d 100644
--- a/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java
+++ b/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java
@@ -145,6 +145,87 @@ public void run() {
         assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue()));
     }

+    public void testNoThreadContextToPreserve() throws InterruptedException, ExecutionException, TimeoutException {
+        final Runnable r = new Runnable() {
+            @Override
+            public void run() {
+                assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue()));
+                assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue()));
+
+                final Span local1 = tracer.startSpan(SpanCreationContext.internal().name("test-local-1"));
+                try (SpanScope localScope = tracer.withSpanInScope(local1)) {
+                    try (StoredContext ignored = threadContext.stashContext()) {
+                        assertThat(local1.getParentSpan(), is(nullValue()));
+                        assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(local1));
+                    }
+                }
+
+                final Span local2 = tracer.startSpan(SpanCreationContext.internal().name("test-local-2"));
+                try (SpanScope localScope = tracer.withSpanInScope(local2)) {
+                    try (StoredContext ignored = threadContext.stashContext()) {
+                        assertThat(local2.getParentSpan(), is(nullValue()));
+                        assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(local2));
+                    }
+                }
+
+                final Span local3 = tracer.startSpan(SpanCreationContext.internal().name("test-local-3"));
+                try (SpanScope localScope = tracer.withSpanInScope(local3)) {
+                    try (StoredContext ignored = threadContext.stashContext()) {
+                        assertThat(local3.getParentSpan(), is(nullValue()));
+                        assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(local3));
+                    }
+                }
+            }
+        };
+
+        executorService.submit(threadContext.preserveContext(r)).get(1, TimeUnit.SECONDS);
+
+        assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue()));
+    }
+
+    public void testPreservingContextThreadContextMultipleSpans() throws InterruptedException, ExecutionException, TimeoutException {
+        final Span span = tracer.startSpan(SpanCreationContext.internal().name("test"));
+
+        try (SpanScope scope = tracer.withSpanInScope(span)) {
+            final Runnable r = new Runnable() {
+                @Override
+                public void run() {
+                    assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue())));
+                    assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(span));
+
+                    final Span local1 = tracer.startSpan(SpanCreationContext.internal().name("test-local-1"));
+                    try (SpanScope localScope = tracer.withSpanInScope(local1)) {
+                        try (StoredContext ignored = threadContext.stashContext()) {
+                            assertThat(local1.getParentSpan(), is(span));
+                            assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(local1));
+                        }
+                    }
+
+                    final Span local2 = tracer.startSpan(SpanCreationContext.internal().name("test-local-2"));
+                    try (SpanScope localScope = tracer.withSpanInScope(local2)) {
+                        try (StoredContext ignored = threadContext.stashContext()) {
+                            assertThat(local2.getParentSpan(), is(span));
+                            assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(local2));
+                        }
+                    }
+
+                    final Span local3 = tracer.startSpan(SpanCreationContext.internal().name("test-local-3"));
+                    try (SpanScope localScope = tracer.withSpanInScope(local3)) {
+                        try (StoredContext ignored = threadContext.stashContext()) {
+                            assertThat(local3.getParentSpan(), is(span));
+                            assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(local3));
+                        }
+                    }
+                }
+            };
+
+            executorService.submit(threadContext.preserveContext(r)).get(1, TimeUnit.SECONDS);
+        }
+
+        assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue())));
+        assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue()));
+    }
+
     public void testPreservingContextAndStashingThreadContext() throws InterruptedException, ExecutionException, TimeoutException {
         final Span span = tracer.startSpan(SpanCreationContext.internal().name("test"));

From 6bb53dea24264fc91dc46824041157066a16b74e Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 11 Dec 2023 10:46:44 -0500
Subject: [PATCH 005/178] Bump org.apache.commons:commons-compress from 1.24.0
 to 1.25.0 (#11556)

* Bump org.apache.commons:commons-compress from 1.24.0 to 1.25.0

Bumps org.apache.commons:commons-compress from 1.24.0 to 1.25.0.

---
updated-dependencies:
- dependency-name: org.apache.commons:commons-compress
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot]

* Update changelog

Signed-off-by: dependabot[bot]

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot]
---
 CHANGELOG.md          | 1 +
 buildSrc/build.gradle | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8271d1655de04..ea0a22a4493cd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -150,6 +150,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521))
 - Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))
 - Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555))
+- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556))

 ### Changed
 - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))

diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 34e94eac1826f..5548e4a83f5ec 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -103,7 +103,7 @@ dependencies {
   api localGroovy()

   api 'commons-codec:commons-codec:1.16.0'
-  api 'org.apache.commons:commons-compress:1.24.0'
+  api 'org.apache.commons:commons-compress:1.25.0'
   api 'org.apache.ant:ant:1.10.14'
   api 'com.netflix.nebula:gradle-extra-configurations-plugin:10.0.0'
   api 'com.netflix.nebula:nebula-publishing-plugin:20.3.0'

From 4e727416c5fa231d5401bed9b3a4e28d912b0cb0 Mon Sep 17 00:00:00 2001
From: Andriy Redko
Date: Mon, 11 Dec 2023 12:52:58 -0500
Subject: [PATCH 006/178] Restore support for Java 8 for RestClient (#11562)

Signed-off-by: Andriy Redko
---
 CHANGELOG.md                                   |  1 +
 client/rest/build.gradle                       | 11 +++++++++--
 .../java/org/opensearch/client/RestClient.java | 10 ++++++++--
 .../HeapBufferedAsyncEntityConsumerTests.java  | 18 +++++++++---------
 client/test/build.gradle                       | 11 +++++++++--
 5 files changed, 36 insertions(+), 15 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ea0a22a4493cd..c36e27336b1a2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -170,6 +170,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308))
 - Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362))
 - Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312))
+- Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562))

 ### Deprecated

diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index ff3c322c5ccf7..f18df65dfddfa 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -34,8 +34,8 @@ apply plugin: 'opensearch.build'
 apply plugin: 'opensearch.publish'

 java {
-  targetCompatibility = JavaVersion.VERSION_11
-  sourceCompatibility = JavaVersion.VERSION_11
+  targetCompatibility = JavaVersion.VERSION_1_8
+  sourceCompatibility = JavaVersion.VERSION_1_8
 }

 base {
@@ -109,3 +109,10 @@ thirdPartyAudit.ignoreMissingClasses(
   'javax.servlet.ServletContextEvent',
   'javax.servlet.ServletContextListener'
 )
+
+tasks.withType(JavaCompile) {
+  // Suppressing '[options] target value 8 is obsolete and will be removed in a future release'
+  configure(options) {
+    options.compilerArgs << '-Xlint:-options'
+  }
+}

diff --git a/client/rest/src/main/java/org/opensearch/client/RestClient.java b/client/rest/src/main/java/org/opensearch/client/RestClient.java
index 7691c01daefea..15905add76c4f 100644
--- a/client/rest/src/main/java/org/opensearch/client/RestClient.java
+++ b/client/rest/src/main/java/org/opensearch/client/RestClient.java
@@ -1116,9 +1116,15 @@ public long getContentLength() {
             if (chunkedEnabled.get()) {
                 return -1L;
             } else {
-                long size;
+                long size = 0;
+                final byte[] buf = new byte[8192];
+                int nread = 0;
+
                 try (InputStream is = getContent()) {
-                    size = is.readAllBytes().length;
+                    // read to EOF which may read more or less than buffer size
+                    while ((nread = is.read(buf)) > 0) {
+                        size += nread;
+                    }
                 } catch (IOException ex) {
                     size = -1L;
                 }

diff --git a/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java b/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
index 6a4b176edd011..fdfe49ca901c9 100644
--- a/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
+++ b/client/rest/src/test/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumerTests.java
@@ -35,34 +35,34 @@ public void tearDown() {
     }

     public void testConsumerAllocatesBufferLimit() throws IOException {
-        consumer.consume(randomByteBufferOfLength(1000).flip());
+        consumer.consume((ByteBuffer) randomByteBufferOfLength(1000).flip());
         assertThat(consumer.getBuffer().capacity(), equalTo(1000));
     }

     public void testConsumerAllocatesEmptyBuffer() throws IOException {
-        consumer.consume(ByteBuffer.allocate(0).flip());
+        consumer.consume((ByteBuffer) ByteBuffer.allocate(0).flip());
         assertThat(consumer.getBuffer().capacity(), equalTo(0));
     }

     public void testConsumerExpandsBufferLimits() throws IOException {
-        consumer.consume(randomByteBufferOfLength(1000).flip());
-        consumer.consume(randomByteBufferOfLength(2000).flip());
-        consumer.consume(randomByteBufferOfLength(3000).flip());
+        consumer.consume((ByteBuffer) randomByteBufferOfLength(1000).flip());
+        consumer.consume((ByteBuffer) randomByteBufferOfLength(2000).flip());
+        consumer.consume((ByteBuffer) randomByteBufferOfLength(3000).flip());
         assertThat(consumer.getBuffer().capacity(), equalTo(6000));
     }

     public void testConsumerAllocatesLimit() throws IOException {
-        consumer.consume(randomByteBufferOfLength(BUFFER_LIMIT).flip());
+        consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT).flip());
         assertThat(consumer.getBuffer().capacity(), equalTo(BUFFER_LIMIT));
     }

     public void testConsumerFailsToAllocateOverLimit() throws IOException {
-        assertThrows(ContentTooLongException.class, () -> consumer.consume(randomByteBufferOfLength(BUFFER_LIMIT + 1).flip()));
+        assertThrows(ContentTooLongException.class, () -> consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT + 1).flip()));
     }

     public void testConsumerFailsToExpandOverLimit() throws IOException {
-        consumer.consume(randomByteBufferOfLength(BUFFER_LIMIT).flip());
-        assertThrows(ContentTooLongException.class, () -> consumer.consume(randomByteBufferOfLength(1).flip()));
+        consumer.consume((ByteBuffer) randomByteBufferOfLength(BUFFER_LIMIT).flip());
+        assertThrows(ContentTooLongException.class, () -> consumer.consume((ByteBuffer) randomByteBufferOfLength(1).flip()));
     }

     private static ByteBuffer randomByteBufferOfLength(int length) {

diff --git a/client/test/build.gradle b/client/test/build.gradle
index f81a009389681..b77865df6decf 100644
--- a/client/test/build.gradle
+++ b/client/test/build.gradle
@@ -30,8 +30,8 @@
 apply plugin: 'opensearch.build'

 java {
-  targetCompatibility = JavaVersion.VERSION_11
-  sourceCompatibility = JavaVersion.VERSION_11
+  targetCompatibility = JavaVersion.VERSION_1_8
+  sourceCompatibility = JavaVersion.VERSION_1_8
 }

 base {
@@ -69,3 +69,10 @@ dependenciesInfo.enabled = false //we aren't releasing this jar
 thirdPartyAudit.enabled = false

 test.enabled = false
+
+tasks.withType(JavaCompile) {
+  // Suppressing '[options] target value 8 is obsolete and will be removed in a future release'
+  configure(options) {
+    options.compilerArgs << '-Xlint:-options'
+  }
+}

From cc38e6bdb1511a1ac510462ca96ef29bcaac6457 Mon Sep 17 00:00:00 2001
From: Marc Handalian
Date: Mon, 11 Dec 2023 10:48:01 -0800
Subject: [PATCH 007/178] Exclude apache avro version included with
 hadoop-minicluster (#11564)

Signed-off-by: Marc Handalian
---
 test/fixtures/hdfs-fixture/build.gradle | 1 +
 1 file changed, 1 insertion(+)

diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle
index 31f871236a322..bb9d70ac39398 100644
--- a/test/fixtures/hdfs-fixture/build.gradle
+++ b/test/fixtures/hdfs-fixture/build.gradle
@@ -50,6 +50,7 @@ dependencies {
     exclude module: "json-io"
     exclude module: "logback-core"
     exclude module: "logback-classic"
+    exclude module: "avro"
   }
   api "org.codehaus.jettison:jettison:${versions.jettison}"
   api "org.apache.commons:commons-compress:${versions.commonscompress}"

From a2f792d4f6c5ecf485a15ba5e7791d4b66c8149e Mon Sep 17 00:00:00 2001
From: Marc Handalian
Date: Mon, 11 Dec 2023 10:54:02 -0800
Subject: [PATCH 008/178] Bump grgit-core ffrom 5.2.0 to 5.2.1 (#11565)

Signed-off-by: Marc Handalian
---
 buildSrc/build.gradle | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 5548e4a83f5ec..0efa170250a7b 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -122,7 +122,7 @@ dependencies {
   api 'org.jruby.jcodings:jcodings:1.0.58'
   api 'org.jruby.joni:joni:2.2.1'
   api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}"
-  api "org.ajoberstar.grgit:grgit-core:5.2.0"
+  api "org.ajoberstar.grgit:grgit-core:5.2.1"

   testFixturesApi "junit:junit:${props.getProperty('junit')}"
   testFixturesApi "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${props.getProperty('randomizedrunner')}"
@@ -133,10 +133,6 @@ dependencies {
   integTestImplementation('org.spockframework:spock-core:2.3-groovy-3.0') {
     exclude module: "groovy"
   }
-  implementation('org.ajoberstar.grgit:grgit-core:5.2.0') {
-    exclude group: 'org.eclipse.jgit', module: 'org.eclipse.jgit'
-  }
-  implementation 'org.eclipse.jgit:org.eclipse.jgit:6.7.0.202309050840-r'
 }

 configurations.all {

From 27981147bdf4d6112714116e4519603d8d24887f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 11 Dec 2023 14:23:19 -0800
Subject: [PATCH 009/178] Bump actions/stale from 8 to 9 (#11557)

* Bump actions/stale from 8 to 9

Bumps [actions/stale](https://github.com/actions/stale) from 8 to 9.
- [Release notes](https://github.com/actions/stale/releases)
- [Changelog](https://github.com/actions/stale/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/stale/compare/v8...v9)

---
updated-dependencies:
- dependency-name: actions/stale
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot]

* Update changelog

Signed-off-by: dependabot[bot]

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot]
---
 .github/workflows/stalled.yml | 2 +-
 CHANGELOG.md                  | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/stalled.yml b/.github/workflows/stalled.yml
index 19ec9c9438bbe..d171332b402f1 100644
--- a/.github/workflows/stalled.yml
+++ b/.github/workflows/stalled.yml
@@ -17,7 +17,7 @@ jobs:
           private_key: ${{ secrets.APP_PRIVATE_KEY }}
           installation_id: 22958780
       - name: Stale PRs
-        uses: actions/stale@v8
+        uses: actions/stale@v9
         with:
           repo-token: ${{ steps.github_app_token.outputs.token }}
           stale-pr-label: 'stalled'

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c36e27336b1a2..4cd28cbbe3933 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -151,6 +151,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))
 - Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555))
 - Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556))
+- Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557))

 ### Changed
 - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))

From 66d4e9e0d5fca1c50b84c08dccc9f5e9164c64c1 Mon Sep 17 00:00:00 2001
From: Craig Perkins
Date: Mon, 11 Dec 2023 21:33:14 -0500
Subject: [PATCH 010/178] Create separate transport action for render search
 template action (#11170)

Signed-off-by: Craig Perkins
---
 CHANGELOG.md                                  |  1 +
 .../script/mustache/MustacheModulePlugin.java |  1 +
 .../mustache/RenderSearchTemplateAction.java  | 21 +++++++
 .../RestRenderSearchTemplateAction.java       |  2 +-
 .../TransportRenderSearchTemplateAction.java  | 30 +++++++++++++++++++
 .../TransportSearchTemplateAction.java        | 20 +++++++++++--
 6 files changed, 71 insertions(+), 4 deletions(-)
 create mode 100644 modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RenderSearchTemplateAction.java
 create mode 100644 modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportRenderSearchTemplateAction.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4cd28cbbe3933..2507b6eb89018 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -120,6 +120,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317))
 - Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069))
 - [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175))
+- Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170))

 ### Dependencies
 - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276))

diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java
index 434a117d9b47e..6b33ac3b6be08 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheModulePlugin.java
@@ -65,6 +65,7 @@ public ScriptEngine getScriptEngine(Settings settings, Collection
 > getActions() {
         return Arrays.asList(
             new ActionHandler<>(SearchTemplateAction.INSTANCE, TransportSearchTemplateAction.class),
+            new ActionHandler<>(RenderSearchTemplateAction.INSTANCE, TransportRenderSearchTemplateAction.class),
             new ActionHandler<>(MultiSearchTemplateAction.INSTANCE, TransportMultiSearchTemplateAction.class)
         );
     }

diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RenderSearchTemplateAction.java
new file mode 100644
index 0000000000000..1feb916c4ce73
--- /dev/null
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RenderSearchTemplateAction.java
@@ -0,0 +1,21 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.script.mustache;
+
+import org.opensearch.action.ActionType;
+
+public class RenderSearchTemplateAction extends ActionType {
+
+    public static final RenderSearchTemplateAction INSTANCE = new RenderSearchTemplateAction();
+    public static final String NAME = "indices:data/read/search/template/render";
+
+    private RenderSearchTemplateAction() {
+        super(NAME, SearchTemplateResponse::new);
+    }
+}

diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java
index 7a94fc45837d9..9ffa2c94cb56f 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/RestRenderSearchTemplateAction.java
@@ -81,6 +81,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client
             renderRequest.setScript(id);
         }

-        return channel -> client.execute(SearchTemplateAction.INSTANCE, renderRequest, new RestToXContentListener<>(channel));
+        return channel -> client.execute(RenderSearchTemplateAction.INSTANCE, renderRequest, new RestToXContentListener<>(channel));
     }
 }

diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportRenderSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportRenderSearchTemplateAction.java
new file mode 100644
index 0000000000000..993d77ffaa75c
--- /dev/null
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportRenderSearchTemplateAction.java
@@ -0,0 +1,30 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.script.mustache;
+
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.core.xcontent.NamedXContentRegistry;
+import org.opensearch.script.ScriptService;
+import org.opensearch.transport.TransportService;
+
+public class TransportRenderSearchTemplateAction extends TransportSearchTemplateAction {
+
+    @Inject
+    public TransportRenderSearchTemplateAction(
+        TransportService transportService,
+        ActionFilters actionFilters,
+        ScriptService scriptService,
+        NamedXContentRegistry xContentRegistry,
+        NodeClient client
+    ) {
+        super(RenderSearchTemplateAction.NAME, transportService, actionFilters, scriptService, xContentRegistry, client);
+    }
+}

diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
index 6e8b9d059b583..d75cc0337b66c 100644
--- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
+++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/TransportSearchTemplateAction.java
@@ -61,9 +61,9 @@ public class TransportSearchTemplateAction extends HandledTransportAction listener) {
         final SearchTemplateResponse response = new SearchTemplateResponse();

From b5700b4317257e2b247d14de17636780819fba8d Mon Sep 17 00:00:00 2001
From: Mohit Godwani <81609427+mgodwan@users.noreply.github.com>
Date: Tue, 12 Dec 2023 20:46:30 +0530
Subject: [PATCH 011/178] Move RAM Usage to precomputed block (#11587)

Signed-off-by: mgodwan
---
 .../java/org/opensearch/index/engine/IndexVersionValue.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/server/src/main/java/org/opensearch/index/engine/IndexVersionValue.java b/server/src/main/java/org/opensearch/index/engine/IndexVersionValue.java
index 803d106a2f25e..c297022f5766d 100644
--- a/server/src/main/java/org/opensearch/index/engine/IndexVersionValue.java
+++ b/server/src/main/java/org/opensearch/index/engine/IndexVersionValue.java
@@ -45,6 +45,7 @@ final class IndexVersionValue extends VersionValue {

     private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IndexVersionValue.class);
+    private static final long TRANSLOG_LOC_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Translog.Location.class);

     private final Translog.Location translogLocation;

@@ -55,7 +56,7 @@ final class IndexVersionValue extends VersionValue {

     @Override
     public long ramBytesUsed() {
-        return RAM_BYTES_USED + RamUsageEstimator.shallowSizeOf(translogLocation);
+        return RAM_BYTES_USED + (translogLocation == null ? 0L : TRANSLOG_LOC_RAM_BYTES_USED);
     }

     @Override

From a8da66caa173c27f98c2124e012d372c64183d6d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E4=BD=95=E5=BB=B6=E9=BE=99?=
Date: Tue, 12 Dec 2023 23:36:01 +0800
Subject: [PATCH 012/178] Automatically add scheme to discovery.ec2.endpoint
 (#11512)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Yanlong He
Signed-off-by: 何延龙
---
 CHANGELOG.md                                  |  1 +
 .../discovery/ec2/AwsEc2ServiceImpl.java      | 15 ++++++++++++-
 .../discovery/ec2/AwsEc2ServiceImplTests.java | 22 +++++++++++++++++++
 3 files changed, 37 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2507b6eb89018..c5ea3f83bffc7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -172,6 +172,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308))
 - Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362))
 - Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312))
+- Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512))
 - Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562))

 ### Deprecated

diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java
index 51f0ad9526e55..8d31403b77c36 100644
--- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java
+++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java
@@ -99,7 +99,7 @@ protected Ec2Client buildClient(

         if (Strings.hasText(endpoint)) {
             logger.debug("using explicit ec2 endpoint [{}]", endpoint);
-            builder.endpointOverride(URI.create(endpoint));
+            builder.endpointOverride(URI.create(getFullEndpoint(endpoint)));
         }

         if (Strings.hasText(region)) {
@@ -110,6 +110,19 @@ protected Ec2Client buildClient(
         return SocketAccess.doPrivileged(builder::build);
     }

+    protected String getFullEndpoint(String endpoint) {
+        if (!Strings.hasText(endpoint)) {
+            return null;
+        }
+        if (endpoint.startsWith("http")) {
+            return endpoint;
+        }
+
+        // if no scheme is provided, default to https
+        logger.debug("no scheme found in endpoint [{}], defaulting to https", endpoint);
+        return "https://" + endpoint;
+    }
+
     static ProxyConfiguration buildProxyConfiguration(Logger logger, Ec2ClientSettings clientSettings) {
         if (Strings.hasText(clientSettings.proxyHost)) {
             try {

diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java
index 81310f7e2e3c3..327a7da2c4cab 100644
--- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java
@@ -202,4 +202,26 @@ public void testAWSConfigurationWithAwsSettings() {
         assertTrue(clientOverrideConfiguration.retryPolicy().isPresent());
         assertThat(clientOverrideConfiguration.retryPolicy().get().numRetries(), is(10));
     }
+
+    public void testGetFullEndpointWithScheme() {
+        final Settings settings = Settings.builder().put("discovery.ec2.endpoint", "http://ec2.us-west-2.amazonaws.com").build();
+        Ec2ClientSettings clientSettings = Ec2ClientSettings.getClientSettings(settings);
+
+        AwsEc2ServiceImpl awsEc2ServiceImpl = new AwsEc2ServiceImpl();
+
+        String endpoint = awsEc2ServiceImpl.getFullEndpoint(clientSettings.endpoint);
+        assertEquals("http://ec2.us-west-2.amazonaws.com", endpoint);
+    }
+
+    public void testGetFullEndpointWithoutScheme() {
+        final Settings settings = Settings.builder().put("discovery.ec2.endpoint", "ec2.us-west-2.amazonaws.com").build();
+        Ec2ClientSettings clientSettings = Ec2ClientSettings.getClientSettings(settings);
+
+        AwsEc2ServiceImpl awsEc2ServiceImpl = new AwsEc2ServiceImpl();
+
+        String endpoint = awsEc2ServiceImpl.getFullEndpoint(clientSettings.endpoint);
+        assertEquals("https://ec2.us-west-2.amazonaws.com", endpoint);
+
+        assertNull(awsEc2ServiceImpl.getFullEndpoint(""));
+    }
 }

From 79b1b74cec2c7d8b2a94b9542e1c569cc27bc0fb Mon Sep 17 00:00:00 2001
From: Marc Handalian
Date: Tue, 12 Dec 2023 12:23:36 -0800
Subject: [PATCH 013/178] Add release notes for 1.3.14 (#11592) (#11594)

* Add release notes for 1.3.14

* Add missing bug fixes to release notes

---------

Signed-off-by: Marc Handalian
---
 .../opensearch.release-notes-1.3.14.md | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)
 create mode 100644 release-notes/opensearch.release-notes-1.3.14.md

diff --git a/release-notes/opensearch.release-notes-1.3.14.md b/release-notes/opensearch.release-notes-1.3.14.md
new file mode 100644
index 0000000000000..319f5a79781c7
--- /dev/null
+++ b/release-notes/opensearch.release-notes-1.3.14.md
@@ -0,0 +1,18 @@
+## 2023-12-12 Version 1.3.14 Release Notes
+
+### Upgrades
+- Bump asm from 9.5 to 9.6 ([#10302](https://github.com/opensearch-project/OpenSearch/pull/10302))
+- Bump netty from 4.1.97.Final to 4.1.99.Final ([#10303](https://github.com/opensearch-project/OpenSearch/pull/10303))
+- Bump `netty` from 4.1.99.Final to 4.1.100.Final ([#10564](https://github.com/opensearch-project/OpenSearch/pull/10564))
+- Bump `netty` from 4.1.100.Final to 4.1.101.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294))
+- Bump `org.apache.zookeeper:zookeper` from 3.8.0 to 3.8.3 ([#11476](https://github.com/opensearch-project/OpenSearch/pull/11476))
+- Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297))
+- Bump `org.apache.avro:avro` from 1.10.2 to 1.11.3 ([#11502](https://github.com/opensearch-project/OpenSearch/pull/11502))
+- Bump `jetty` from 9.4.51.v20230217 to 9.4.52.v20230823 ([#11501](https://github.com/opensearch-project/OpenSearch/pull/11501))
+- Bump `io.projectreactor:reactor-core` from 3.4.23 to 3.4.34 and reactor-netty from 1.0.24 to 1.0.39 ([#11500](https://github.com/opensearch-project/OpenSearch/pull/11500))
+- Bump `logback-core` and `logback-classic` to 1.2.13 ([#11521](https://github.com/opensearch-project/OpenSearch/pull/11521))
+- Bumps `jetty` version from 9.4.52.v20230823 to 9.4.53.v20231009 ([#11539](https://github.com/opensearch-project/OpenSearch/pull/11539))
+
+### Bug Fixes
+- Use iterative approach to evaluate Regex.simpleMatch ([#11060](https://github.com/opensearch-project/OpenSearch/pull/11060))
+- Improve compressed request handling ([#10261](https://github.com/opensearch-project/OpenSearch/pull/10261))

From 074bc6a91c05a5ed0e3d36cd672c905b0bf023c3 Mon Sep 17 00:00:00 2001
From: Jay Deng
Date: Tue, 12 Dec 2023 14:33:01 -0800
Subject: [PATCH 014/178] Add heuristic slice_size for slice level top buckets
 in concurrent segment search (#11585)

Signed-off-by: Jay Deng
---
 .../java/org/opensearch/search/internal/SearchContext.java | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/server/src/main/java/org/opensearch/search/internal/SearchContext.java b/server/src/main/java/org/opensearch/search/internal/SearchContext.java
index 2dabb825cd227..cc43f4e5d79fb 100644
--- a/server/src/main/java/org/opensearch/search/internal/SearchContext.java
+++ b/server/src/main/java/org/opensearch/search/internal/SearchContext.java
@@ -35,7 +35,6 @@
 import org.apache.lucene.search.CollectorManager;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.util.ArrayUtil;
 import org.opensearch.action.search.SearchShardTask;
 import org.opensearch.action.search.SearchType;
 import org.opensearch.common.Nullable;
@@ -87,6 +86,8 @@
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.atomic.AtomicBoolean;

+import static org.opensearch.search.aggregations.bucket.BucketUtils.suggestShardSideQueueSize;
+
 /**
  * This class encapsulates the state needed to execute a search. It holds a reference to the
  * shards point in time snapshot (IndexReader / ContextIndexSearcher) and allows passing on
@@ -410,7 +411,7 @@ public boolean shouldUseConcurrentSearch() {
      */
     public LocalBucketCountThresholds asLocalBucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
         if (shouldUseConcurrentSearch()) {
-            return new LocalBucketCountThresholds(0, ArrayUtil.MAX_ARRAY_LENGTH - 1);
+            return new LocalBucketCountThresholds(0, suggestShardSideQueueSize(bucketCountThresholds.getShardSize()));
         } else {
             return new LocalBucketCountThresholds(bucketCountThresholds.getShardMinDocCount(), bucketCountThresholds.getShardSize());
         }

From bb671969b9de5b916e821b1c2d6da048bd56383e Mon Sep 17 00:00:00 2001
From: Marc Handalian
Date: Wed, 13 Dec 2023 11:27:50 -0800
Subject: [PATCH 015/178] Convert issue and bug templates to use forms (#11534)

Signed-off-by: Marc Handalian
Signed-off-by: Marc Handalian
---
 .github/ISSUE_TEMPLATE/bug_template.md     | 33 ---------
 .github/ISSUE_TEMPLATE/bug_template.yml    | 79 ++++++++++++++++++++
 .github/ISSUE_TEMPLATE/feature_request.md  | 19 ------
 .github/ISSUE_TEMPLATE/feature_request.yml | 62 +++++++++++++++
 .github/workflows/add-untriaged.yml        | 20 ------
 .github/workflows/triage.yml               | 34 ++++++++++
 6 files changed, 175 insertions(+), 72 deletions(-)
 delete mode 100644 .github/ISSUE_TEMPLATE/bug_template.md
 create mode 100644 .github/ISSUE_TEMPLATE/bug_template.yml
 delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md
 create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml
 delete mode 100644 .github/workflows/add-untriaged.yml
 create mode 100644 .github/workflows/triage.yml

diff --git a/.github/ISSUE_TEMPLATE/bug_template.md b/.github/ISSUE_TEMPLATE/bug_template.md
deleted file mode 100644
index be3ae51b237ee..0000000000000
--- a/.github/ISSUE_TEMPLATE/bug_template.md
+++ /dev/null
@@ -1,33 +0,0 @@
----
-name: πŸ› Bug report
-about: Create a report to help us improve
-title: "[BUG]"
-labels: 'bug, untriaged'
-assignees: ''
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. Go to '...'
-2. Click on '....'
-3. Scroll down to '....'
-4. See error
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Plugins**
-Please list all plugins currently enabled.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Host/Environment (please complete the following information):**
- - OS: [e.g. iOS]
 - - Version [e.g. 22]
-
-**Additional context**
-Add any other context about the problem here.

diff --git a/.github/ISSUE_TEMPLATE/bug_template.yml b/.github/ISSUE_TEMPLATE/bug_template.yml
new file mode 100644
index 0000000000000..2cd1ee8a7e688
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_template.yml
@@ -0,0 +1,79 @@
+name: πŸ› Bug report
+description: Create a report to help us improve
+title: "[BUG] "
+labels: ['bug, untriaged']
+body:
+  - type: textarea
+    attributes:
+      label: Describe the bug
+      description: A clear and concise description of what the bug is.
+    validations:
+      required: true
+  - type: dropdown
+    attributes:
+      label: Related component
+      description: Choose a specific OpenSearch component your bug belongs to. If you are unsure which to select or if the component is not present, select "Other".
+      multiple: false
+      options:
+        - Other
+        - Build
+        - Clients
+        - Cluster Manager
+        - Extensions
+        - Indexing:Performance
+        - Indexing:Replication
+        - Indexing
+        - Libraries
+        - Plugins
+        - Search:Aggregations
+        - Search:Performance
+        - Search:Query Capabilities
+        - Search:Query Insights
+        - Search:Relevance
+        - Search:Remote Search
+        - Search:Resiliency
+        - Search:Searchable Snapshots
+        - Search
+        - Storage:Durability
+        - Storage:Performance
+        - Storage:Remote
+        - Storage:Snapshots
+        - Storage
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: To Reproduce
+      description: Steps to reproduce the behavior.
+      value: |
+        1. Go to '...'
+        2. Click on '....'
+        3. Scroll down to '....'
+        4. See error
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Expected behavior
+      description: A clear and concise description of what you expected to happen.
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Additional Details
+      description: Add any other context about the problem here.
+      value: |
+        **Plugins**
+        Please list all plugins currently enabled.

+        **Screenshots**
+        If applicable, add screenshots to help explain your problem.

+        **Host/Environment (please complete the following information):**
+        - OS: [e.g. iOS]
+        - Version [e.g. 22]

+        **Additional context**
+        Add any other context about the problem here.
+    validations:
+      required: false

diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 53b3614a34342..0000000000000
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,19 +0,0 @@
----
-name: πŸŽ† Feature request
-about: Suggest an idea for this project
-title: ''
-labels: 'enhancement, untriaged'
-assignees: ''
----
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 0000000000000..d93ac8b590706
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,62 @@
+name: πŸŽ† Feature request
+description: Suggest an idea for this project
+title: '[Feature Request] <title>'
+labels: ['enhancement, untriaged']
+body:
+  - type: textarea
+    attributes:
+      label: Is your feature request related to a problem? Please describe
+      description: A clear and concise description of what the problem is.
+      placeholder: Ex. I'm always frustrated when [...]
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Describe the solution you'd like
+      description: A clear and concise description of what you want to happen.
+    validations:
+      required: true
+  - type: dropdown
+    attributes:
+      label: Related component
+      description: Choose a specific OpenSearch component your feature request belongs to. If you are unsure of which component to select or if the component is not present, select "Other".
+      multiple: false
+      options:
+        - Other
+        - Build
+        - Clients
+        - Cluster Manager
+        - Extensions
+        - Indexing:Performance
+        - Indexing:Replication
+        - Indexing
+        - Libraries
+        - Plugins
+        - Search:Aggregations
+        - Search:Performance
+        - Search:Query Capabilities
+        - Search:Query Insights
+        - Search:Relevance
+        - Search:Remote Search
+        - Search:Resiliency
+        - Search:Searchable Snapshots
+        - Search
+        - Storage:Durability
+        - Storage:Performance
+        - Storage:Remote
+        - Storage:Snapshots
+        - Storage
+    validations:
+      required: true
+  - type: textarea
+    attributes:
+      label: Describe alternatives you've considered
+      description: A clear and concise description of any alternative solutions or features you've considered.
+    validations:
+      required: false
+  - type: textarea
+    attributes:
+      label: Additional context
+      description: Add any other context or screenshots about the feature request here.
+    validations:
+      required: false

diff --git a/.github/workflows/add-untriaged.yml b/.github/workflows/add-untriaged.yml
deleted file mode 100644
index 38de96f663051..0000000000000
--- a/.github/workflows/add-untriaged.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-name: Apply 'untriaged' label during issue lifecycle
-
-on:
-  issues:
-    types: [opened, reopened, transferred]
-
-jobs:
-  apply-label:
-    if: github.repository == 'opensearch-project/OpenSearch'
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/github-script@v7
-        with:
-          script: |
-            github.rest.issues.addLabels({
-              issue_number: context.issue.number,
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              labels: ['untriaged']
-            })

diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml
new file mode 100644
index 0000000000000..4a4a5eda628d2
--- /dev/null
+++ b/.github/workflows/triage.yml
@@ -0,0 +1,34 @@
+name: Auto triage based on the component label in issue
+
+on:
+  issues:
+    types: [opened, reopened, transferred]
+
+jobs:
+  apply-label:
+    if: github.repository == 'opensearch-project/OpenSearch'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/github-script@v7
+        with:
+          script: |
+            const { issue, repository } = context.payload;
+            const { number, body } = issue;
+            const { owner, name } = repository;
+            const regex = /###\sRelated\scomponent\n\n(\w*)\n/gm;
+            let match;
+            while ( ( match = regex.exec( body ) ) ) {
+              const [ , component_label ] = match;
+              await github.rest.issues.addLabels( {
+                owner: owner.login,
+                repo: name,
+                issue_number: number,
+                labels: [ `${ component_label }` ],
+              } );
+            }
+            github.rest.issues.addLabels({
+              issue_number: context.issue.number,
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              labels: ['untriaged']
+            })

From 9f4296343cc0b0089c8b5e6912910e3a8339a017 Mon Sep 17 00:00:00 2001
From: Peter Nied <petern@amazon.com>
Date: Thu, 14 Dec 2023 16:02:38 -0600
Subject: [PATCH 016/178] Personalize code ownership areas (#11600)

Signed-off-by: Peter Nied <petern@amazon.com>
---
 .github/CODEOWNERS | 28 +++++++++++++++++++++++++++-
 1 file changed, 27 insertions(+), 1 deletion(-)

diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 8076adcf00ca9..68d02d5f7d544 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1 +1,27 @@
-* @abbashus @adnapibar @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @kartg @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @peternied @reta @Rishikesh1159 @ryanbogan @sachinpkale @saratvemulapalli @setiah @shwetathareja @sohami @tlfeng @VachaShah
+# CODEOWNERS manages notifications, not PR approvals
+# For PR approvals see /.github/workflows/maintainer-approval.yml
+
+# Files have a single rule applied, the last match decides the owner
+# If you would like to more specifically apply ownership, include existing owner in new sub fields
+
+# To verify changes of CODEOWNERS file
+# In VSCode
+# 1. Install extension https://marketplace.visualstudio.com/items?itemName=jasonnutter.vscode-codeowners
+# 2. Go to a file
+# 3. Use the command palette to run the CODEOWNERS: Show owners of current file command, which will display all code owners for the current file.
+ +# Default ownership for all repo files +* @abbashus @adnapibar @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @kartg @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @reta @Rishikesh1159 @ryanbogan @sachinpkale @saratvemulapalli @setiah @shwetathareja @sohami @tlfeng @VachaShah + +/modules/transport-netty4/ @peternied + +/plugins/identity-shiro/ @peternied + +/server/src/main/java/org/opensearch/extensions/ @peternied +/server/src/main/java/org/opensearch/identity/ @peternied +/server/src/main/java/org/opensearch/threadpool/ @peternied +/server/src/main/java/org/opensearch/transport/ @peternied + +/.github/ @peternied + +/MAINTAINERS.md @abbashus @adnapibar @anasalkouz @andrross @Bukhtawar @CEHENKLE @dblock @dbwiddis @dreamer-89 @gbbafna @kartg @kotwanikunal @mch2 @msfroh @nknize @owaiskazi19 @peternied @reta @Rishikesh1159 @ryanbogan @sachinpkale @saratvemulapalli @setiah @shwetathareja @sohami @tlfeng @VachaShah From 49ce678cbccaf03ac1da862425d10844c40b4558 Mon Sep 17 00:00:00 2001 From: Marc Handalian <handalm@amazon.com> Date: Thu, 14 Dec 2023 14:19:32 -0800 Subject: [PATCH 017/178] Fix regex in triage.yml to support labels with spaces (#11618) Signed-off-by: Marc Handalian <marc.handalian@gmail.com> --- .github/workflows/triage.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml index 4a4a5eda628d2..c305818bdb0a9 100644 --- a/.github/workflows/triage.yml +++ b/.github/workflows/triage.yml @@ -15,7 +15,7 @@ jobs: const { issue, repository } = context.payload; const { number, body } = issue; const { owner, name } = repository; - const regex = /###\sRelated\scomponent\n\n(\w*)\n/gm; + const regex = /###\sRelated\scomponent\n\n(\w.*)\n/gm; let match; while ( ( match = regex.exec( body ) ) ) { const [ , component_label ] = match; From 332f4c535643b459071aa9d2b3e5d50d137d8523 Mon Sep 17 00:00:00 2001 From: Dhwanil Patel <dhwanip@amazon.com> Date: Fri, 15 Dec 2023 04:42:03 +0530 Subject: [PATCH 018/178] Fix flaky test of IndexShardTests.testRestoreLocalHistoryFromTranslogOnPromotion (#11487) Signed-off-by: Dhwanil Patel <dhwanip@amazon.com> --- .../test/java/org/opensearch/index/shard/IndexShardTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java index dc2111fdcfc56..46be10ce62840 100644 --- a/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/IndexShardTests.java @@ -1416,7 +1416,7 @@ public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException, indexShard, indexShard.getPendingPrimaryTerm() + 1, globalCheckpoint, - randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo), + randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNoOfUpdatesOrDeletesBeforeRollback), new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { From 51cab98a674bef95fe358ddfd7741489482e1212 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 09:44:18 -0500 Subject: [PATCH 019/178] Bump com.gradle.enterprise from 3.15.1 to 3.16.1 (#11629) * Bump com.gradle.enterprise from 3.15.1 to 3.16.1 Bumps com.gradle.enterprise from 3.15.1 to 3.16.1. 
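The regex fix in triage.yml above (#11618) is easiest to see against a concrete input. The following standalone Java sketch is illustrative only; the class name and the sample issue body are assumptions rather than part of the workflow. It shows why the original (\w*) group never matches a component label containing a space, while the corrected (\w.*) captures the whole label line:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class TriageRegexSketch {
    public static void main(String[] args) {
        // Hypothetical issue body produced by the "Related component" dropdown.
        String body = "### Related component\n\nCluster Manager\n";
        // Old pattern: \w* cannot consume the space in "Cluster Manager", so the
        // required trailing \n never matches and no label is extracted.
        Matcher before = Pattern.compile("###\\sRelated\\scomponent\\n\\n(\\w*)\\n").matcher(body);
        // Fixed pattern: \w.* runs to the end of the line, capturing the full label.
        Matcher after = Pattern.compile("###\\sRelated\\scomponent\\n\\n(\\w.*)\\n").matcher(body);
        System.out.println(before.find());                      // false
        System.out.println(after.find() ? after.group(1) : ""); // Cluster Manager
    }
}

Plain one-word labels such as "Build" match either pattern; the change matters for any label containing a space or another non-word character, such as "Cluster Manager" or "Indexing:Performance".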
--- updated-dependencies: - dependency-name: com.gradle.enterprise dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- settings.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c5ea3f83bffc7..2ce89159e4783 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -143,7 +143,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861)) - Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344)) - Bump `reactor-netty-core` from 1.1.12 to 1.1.13 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350)) -- Bump `com.gradle.enterprise` from 3.14.1 to 3.15.1 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339)) +- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.1 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629)) - Bump `actions/setup-java` from 3 to 4 ([#11447](https://github.com/opensearch-project/OpenSearch/pull/11447)) - Bump `commons-net:commons-net` from 3.9.0 to 3.10.0 ([#11450](https://github.com/opensearch-project/OpenSearch/pull/11450)) - Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445)) diff --git a/settings.gradle b/settings.gradle index 139d45013710f..24ab4a7a22237 100644 --- a/settings.gradle +++ b/settings.gradle @@ -10,7 +10,7 @@ */ plugins { - id "com.gradle.enterprise" version "3.15.1" + id "com.gradle.enterprise" version "3.16.1" } ext.disableBuildCache = hasProperty('DISABLE_BUILD_CACHE') || System.getenv().containsKey('DISABLE_BUILD_CACHE') From 2b1c9ae3c4109e30a6e6a30553ea915a61bd5ba7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E4=BD=95=E5=BB=B6=E9=BE=99?= <hey.yanlong@gmail.com> Date: Mon, 18 Dec 2023 23:42:39 +0800 Subject: [PATCH 020/178] Fix automatic addition of protocol in discovery endpoint (#11612) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix #11609 Signed-off-by: Yanlong He <hey.yanlong@gmail.com> * Fix #11609 Signed-off-by: heyanlong <hey.yanlong@gmail.com> * Update AwsEc2ServiceImplTests.java Signed-off-by: 何延龙 <hey.yanlong@gmail.com> * spotless Signed-off-by: heyanlong <hey.yanlong@gmail.com> --------- Signed-off-by: Yanlong He <hey.yanlong@gmail.com> Signed-off-by: heyanlong <hey.yanlong@gmail.com> Signed-off-by: 何延龙 <hey.yanlong@gmail.com> --- CHANGELOG.md | 1 + .../org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java | 2 +- .../opensearch/discovery/ec2/AwsEc2ServiceImplTests.java | 6 ++++++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2ce89159e4783..15d4374d27929 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -195,6 +195,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix remote shards balancer and remove unused variables
([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167)) - Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238)) - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) +- Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) ### Security diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java index 8d31403b77c36..a2e920761b655 100644 --- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/AwsEc2ServiceImpl.java @@ -114,7 +114,7 @@ protected String getFullEndpoint(String endpoint) { if (!Strings.hasText(endpoint)) { return null; } - if (endpoint.startsWith("http")) { + if (endpoint.startsWith("http://") || endpoint.startsWith("https://")) { return endpoint; } diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java index 327a7da2c4cab..3164abe456515 100644 --- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java +++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/AwsEc2ServiceImplTests.java @@ -211,6 +211,10 @@ public void testGetFullEndpointWithScheme() { String endpoint = awsEc2ServiceImpl.getFullEndpoint(clientSettings.endpoint); assertEquals("http://ec2.us-west-2.amazonaws.com", endpoint); + + assertEquals("http://httpserver.example.com", awsEc2ServiceImpl.getFullEndpoint("http://httpserver.example.com")); + + assertEquals("https://httpserver.example.com", awsEc2ServiceImpl.getFullEndpoint("https://httpserver.example.com")); } public void testGetFullEndpointWithoutScheme() { @@ -222,6 +226,8 @@ public void testGetFullEndpointWithoutScheme() { String endpoint = awsEc2ServiceImpl.getFullEndpoint(clientSettings.endpoint); assertEquals("https://ec2.us-west-2.amazonaws.com", endpoint); + assertEquals("https://httpserver.example.com", awsEc2ServiceImpl.getFullEndpoint("httpserver.example.com")); + assertNull(awsEc2ServiceImpl.getFullEndpoint("")); } } From 9146c19c4a2d8cea2e882dfd938cf80d04789f04 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 15:15:06 -0500 Subject: [PATCH 021/178] Bump com.squareup.okio:okio from 3.6.0 to 3.7.0 in /test/fixtures/hdfs-fixture (#11632) * Bump com.squareup.okio:okio in /test/fixtures/hdfs-fixture Bumps [com.squareup.okio:okio](https://github.com/square/okio) from 3.6.0 to 3.7.0. - [Changelog](https://github.com/square/okio/blob/master/CHANGELOG.md) - [Commits](https://github.com/square/okio/compare/parent-3.6.0...parent-3.7.0) --- updated-dependencies: - dependency-name: com.squareup.okio:okio dependency-type: direct:production update-type: version-update:semver-minor ... 
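A hedged sketch of the getFullEndpoint change in AwsEc2ServiceImpl above (#11612): the helper below is a simplified stand-in written for illustration, not the real method (it omits the null and empty-string handling, and the class and method names are assumptions). It contrasts the old prefix check with the corrected one:

public final class EndpointSchemeSketch {
    // Old check: any endpoint merely starting with "http", including a host such as
    // "httpserver.example.com", was treated as if it already carried a scheme.
    static String oldFullEndpoint(String endpoint) {
        return endpoint.startsWith("http") ? endpoint : "https://" + endpoint;
    }

    // New check: only an explicit "http://" or "https://" prefix is passed through;
    // anything else gets the default scheme prepended.
    static String newFullEndpoint(String endpoint) {
        if (endpoint.startsWith("http://") || endpoint.startsWith("https://")) {
            return endpoint;
        }
        return "https://" + endpoint;
    }

    public static void main(String[] args) {
        System.out.println(oldFullEndpoint("httpserver.example.com")); // httpserver.example.com (bug: scheme never added)
        System.out.println(newFullEndpoint("httpserver.example.com")); // https://httpserver.example.com
    }
}

These are the same cases exercised by the updated AwsEc2ServiceImplTests above.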
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- test/fixtures/hdfs-fixture/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 15d4374d27929..5fe1fc9f5e165 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -132,7 +132,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639)) - Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297)) - Bump `com.google.http-client:google-http-client` from 1.43.2 to 1.43.3 ([#10635](https://github.com/opensearch-project/OpenSearch/pull/10635)) -- Bump `com.squareup.okio:okio` from 3.5.0 to 3.6.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637)) +- Bump `com.squareup.okio:okio` from 3.5.0 to 3.7.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637), [#11632](https://github.com/opensearch-project/OpenSearch/pull/11632)) - Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.0 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270)) - Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504)) - Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171)) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index bb9d70ac39398..7adf29792f27d 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -76,6 +76,6 @@ dependencies { runtimeOnly("com.squareup.okhttp3:okhttp:4.12.0") { exclude group: "com.squareup.okio" } - runtimeOnly "com.squareup.okio:okio:3.6.0" + runtimeOnly "com.squareup.okio:okio:3.7.0" runtimeOnly "org.xerial.snappy:snappy-java:1.1.10.5" } From 217c382fc53196460fbc9ba52ee93afc4a7b361a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 15:56:42 -0600 Subject: [PATCH 022/178] Bump actions/download-artifact from 3 to 4 (#11631) Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 3 to 4. 
- [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v3...v4) Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/check-compatibility.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-compatibility.yml b/.github/workflows/check-compatibility.yml index d93f7e73b91e7..f400991366da1 100644 --- a/.github/workflows/check-compatibility.yml +++ b/.github/workflows/check-compatibility.yml @@ -48,7 +48,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Download results - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: results.txt From 37dae8265e76c1a2da1aa56aeb3a6ce18005c4a7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Dec 2023 17:42:04 -0500 Subject: [PATCH 023/178] Bump com.netflix.nebula.ospackage-base from 11.5.0 to 11.6.0 in /distribution/packages (#11630) * Bump com.netflix.nebula.ospackage-base in /distribution/packages Bumps com.netflix.nebula.ospackage-base from 11.5.0 to 11.6.0. --- updated-dependencies: - dependency-name: com.netflix.nebula.ospackage-base dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- distribution/packages/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5fe1fc9f5e165..ebad434b8fd5f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -126,7 +126,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) - Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554)) - Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298)) -- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.5.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295)) +- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630)) - Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506)) - Bump `de.thetaphi:forbiddenapis` from 3.5.1 to 3.6 ([#10508](https://github.com/opensearch-project/OpenSearch/pull/10508)) - Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639)) diff --git 
a/distribution/packages/build.gradle b/distribution/packages/build.gradle index cb05661dc74a4..ededa7bff34d8 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -63,7 +63,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.5.0" + id "com.netflix.nebula.ospackage-base" version "11.6.0" } void addProcessFilesTask(String type, boolean jdk) { From 863d45370d62868d3d5a926798571efffc3b90d2 Mon Sep 17 00:00:00 2001 From: Craig Perkins <cwperx@amazon.com> Date: Tue, 19 Dec 2023 09:48:11 -0500 Subject: [PATCH 024/178] Add additional handling in SearchTemplateRequest when simulate is set to true (#11591) Signed-off-by: Craig Perkins <cwperx@amazon.com> --- CHANGELOG.md | 1 + .../script/mustache/SearchTemplateRequest.java | 9 +++++++++ .../mustache/SearchTemplateRequestTests.java | 16 ++++++++++++++++ 3 files changed, 26 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ebad434b8fd5f..52b89e57c0959 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -121,6 +121,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069)) - [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175)) - Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170)) +- Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) ### Dependencies - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java index 1aabea30fc651..d02c5f1efa591 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/SearchTemplateRequest.java @@ -259,16 +259,25 @@ public void writeTo(StreamOutput out) throws IOException { @Override public String[] indices() { + if (request == null) { + return new String[0]; + } return request.indices(); } @Override public IndicesOptions indicesOptions() { + if (request == null) { + return SearchRequest.DEFAULT_INDICES_OPTIONS; + } return request.indicesOptions(); } @Override public IndicesRequest indices(String... 
indices) { + if (request == null) { + return new SearchRequest(new String[0]).indices(indices); + } return request.indices(indices); } } diff --git a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java index 72443d1323b44..71ce616fd5d94 100644 --- a/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java +++ b/modules/lang-mustache/src/test/java/org/opensearch/script/mustache/SearchTemplateRequestTests.java @@ -32,6 +32,7 @@ package org.opensearch.script.mustache; +import org.opensearch.action.search.SearchRequest; import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.script.ScriptType; import org.opensearch.search.RandomSearchRequestGenerator; @@ -110,4 +111,19 @@ public static SearchTemplateRequest createRandomRequest() { request.setRequest(RandomSearchRequestGenerator.randomSearchRequest(SearchSourceBuilder::searchSource)); return request; } + + public void testSimulatedSearchTemplateRequest() { + SearchTemplateRequest request = new SearchTemplateRequest(); + request.setSimulate(true); + + assertEquals(0, request.indices().length); + assertEquals(SearchRequest.DEFAULT_INDICES_OPTIONS, request.indicesOptions()); + assertEquals(2, request.indices("index1", "index2").indices().length); + + SearchTemplateRequest randomRequest = createRandomRequest(); + int expectedIndicesLength = randomRequest.indices().length; + request.setSimulate(true); + + assertEquals(expectedIndicesLength, randomRequest.indices().length); + } } From f1243eafdac3bde3a037c2b2b53d52819020b483 Mon Sep 17 00:00:00 2001 From: Peter Nied <petern@amazon.com> Date: Tue, 19 Dec 2023 09:08:46 -0600 Subject: [PATCH 025/178] Bump actions/upload-artifact from 3 to 4 (#11647) Signed-off-by: Peter Nied <petern@amazon.com> --- .github/workflows/check-compatibility.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/check-compatibility.yml b/.github/workflows/check-compatibility.yml index f400991366da1..d6c65ddd446cd 100644 --- a/.github/workflows/check-compatibility.yml +++ b/.github/workflows/check-compatibility.yml @@ -36,7 +36,7 @@ jobs: echo "### Compatible components" >> "${{ github.workspace }}/results.txt" && grep -e 'Compatible component' $HOME/gradlew-check.out | sed -e 's/Compatible component: \[\(.*\)\]/- \1/' >> "${{ github.workspace }}/results.txt" - name: Upload results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: results.txt path: ${{ github.workspace }}/results.txt From 2c8ee1947b55a1cc5bc1a114b82e3a3b8a99851e Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Tue, 19 Dec 2023 14:15:59 -0600 Subject: [PATCH 026/178] Add context for custom GC tunings (#11651) Signed-off-by: Andrew Ross <andrross@amazon.com> --- distribution/src/config/jvm.options | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index 1a0abcbaf9c88..f0ac98faffda9 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -38,12 +38,12 @@ 8-10:-XX:+UseCMSInitiatingOccupancyOnly ## G1GC Configuration -# NOTE: G1 GC is only supported on JDK version 10 or later -# to use G1GC, uncomment the next two lines and update the version on the -# following three lines to your version of the JDK -# 10:-XX:-UseConcMarkSweepGC -# 
10:-XX:-UseCMSInitiatingOccupancyOnly +# NOTE: G1GC is the default GC for all JDKs 11 and newer 11-:-XX:+UseG1GC +# See https://github.com/elastic/elasticsearch/pull/46169 for the history +# behind these settings, but the tl;dr is that default values can lead +# to situations where heap usage grows enough to trigger a circuit breaker +# before GC kicks in. 11-:-XX:G1ReservePercent=25 11-:-XX:InitiatingHeapOccupancyPercent=30 From 6a6ab32e532d16de222645ccfae376c778c68991 Mon Sep 17 00:00:00 2001 From: Suraj Singh <surajrider@gmail.com> Date: Thu, 21 Dec 2023 11:52:22 -0800 Subject: [PATCH 027/178] [Segment Replication] Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations (#11583) * Add new cluster level setting that prevents index level replication type setting overrides Signed-off-by: Suraj Singh <surajrider@gmail.com> * Block restore on mis-matching replication type setting Signed-off-by: Suraj Singh <surajrider@gmail.com> * Address review comments and rebase Signed-off-by: Suraj Singh <surajrider@gmail.com> * Address review comments Signed-off-by: Suraj Singh <surajrider@gmail.com> * Use appropriate variable names Signed-off-by: Suraj Singh <surajrider@gmail.com> * Fix failing integ test Signed-off-by: Suraj Singh <surajrider@gmail.com> --------- Signed-off-by: Suraj Singh <surajrider@gmail.com> --- CHANGELOG.md | 1 + .../SegmentReplicationClusterSettingIT.java | 142 +++++++++++++++++- .../SegmentReplicationSnapshotIT.java | 63 ++++++++ .../metadata/MetadataCreateIndexService.java | 29 ++++ .../common/settings/ClusterSettings.java | 3 +- .../opensearch/indices/IndicesService.java | 13 ++ .../MetadataCreateIndexServiceTests.java | 104 +++++++++++++ 7 files changed, 346 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 52b89e57c0959..ca10ecf57e2b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -122,6 +122,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175)) - Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170)) - Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) +- Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) ### Dependencies - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java index a82fd8d845709..c4e8ccfc0ecec 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java @@ -10,7 +10,10 @@ import org.opensearch.action.admin.indices.settings.get.GetSettingsRequest; import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse; +import 
org.opensearch.action.admin.indices.shrink.ResizeType; +import org.opensearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.collect.Tuple; import org.opensearch.common.settings.Settings; import org.opensearch.core.index.Index; import org.opensearch.index.IndexModule; @@ -18,8 +21,15 @@ import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.OpenSearchIntegTestCase; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Locale; + import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; +import static org.opensearch.indices.IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_SETTING_REPLICATION_TYPE; +import static org.hamcrest.Matchers.hasSize; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0) public class SegmentReplicationClusterSettingIT extends OpenSearchIntegTestCase { @@ -29,6 +39,9 @@ public class SegmentReplicationClusterSettingIT extends OpenSearchIntegTestCase protected static final int SHARD_COUNT = 1; protected static final int REPLICA_COUNT = 1; + protected static final String REPLICATION_MISMATCH_VALIDATION_ERROR = + "Validation Failed: 1: index setting [index.replication.type] is not allowed to be set as [cluster.index.restrict.replication.type=true];"; + @Override public Settings indexSettings() { return Settings.builder() @@ -44,14 +57,6 @@ protected boolean addMockInternalEngine() { return false; } - @Override - protected Settings nodeSettings(int nodeOrdinal) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put(CLUSTER_SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) - .build(); - } - public void testIndexReplicationSettingOverridesSegRepClusterSetting() throws Exception { Settings settings = Settings.builder().put(CLUSTER_SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build(); final String ANOTHER_INDEX = "test-index"; @@ -123,4 +128,125 @@ public void testIndexReplicationSettingOverridesDocRepClusterSetting() throws Ex assertEquals(indicesService.indexService(anotherIndex).getIndexSettings().isSegRepEnabled(), false); } + public void testReplicationTypesOverrideNotAllowed_IndexAPI() { + // Generate mutually exclusive replication strategies at cluster and index level + List<ReplicationType> replicationStrategies = getRandomReplicationTypesAsList(); + ReplicationType clusterLevelReplication = replicationStrategies.get(0); + ReplicationType indexLevelReplication = replicationStrategies.get(1); + Settings nodeSettings = Settings.builder() + .put(CLUSTER_SETTING_REPLICATION_TYPE, clusterLevelReplication) + .put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true) + .build(); + internalCluster().startClusterManagerOnlyNode(nodeSettings); + internalCluster().startDataOnlyNode(nodeSettings); + Settings indexSettings = Settings.builder().put(indexSettings()).put(SETTING_REPLICATION_TYPE, indexLevelReplication).build(); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> createIndex(INDEX_NAME, indexSettings)); + assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getMessage()); + } + + public void testReplicationTypesOverrideNotAllowed_WithTemplates() { + // Generate mutually exclusive replication strategies at cluster and index level + List<ReplicationType> 
replicationStrategies = getRandomReplicationTypesAsList(); + ReplicationType clusterLevelReplication = replicationStrategies.get(0); + ReplicationType templateReplicationType = replicationStrategies.get(1); + Settings nodeSettings = Settings.builder() + .put(CLUSTER_SETTING_REPLICATION_TYPE, clusterLevelReplication) + .put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true) + .build(); + internalCluster().startClusterManagerOnlyNode(nodeSettings); + internalCluster().startDataOnlyNode(nodeSettings); + internalCluster().startDataOnlyNode(nodeSettings); + logger.info( + "--> Create index with template replication {} and cluster level replication {}", + templateReplicationType, + clusterLevelReplication + ); + // Create index template + client().admin() + .indices() + .preparePutTemplate("template_1") + .setPatterns(Collections.singletonList("test-idx*")) + .setSettings(Settings.builder().put(indexSettings()).put(SETTING_REPLICATION_TYPE, templateReplicationType).build()) + .setOrder(0) + .get(); + + GetIndexTemplatesResponse response = client().admin().indices().prepareGetTemplates().get(); + assertThat(response.getIndexTemplates(), hasSize(1)); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> createIndex(INDEX_NAME)); + assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getMessage()); + } + + public void testReplicationTypesOverrideNotAllowed_WithResizeAction() { + // Generate mutually exclusive replication strategies at cluster and index level + List<ReplicationType> replicationStrategies = getRandomReplicationTypesAsList(); + ReplicationType clusterLevelReplication = replicationStrategies.get(0); + ReplicationType indexLevelReplication = replicationStrategies.get(1); + Settings nodeSettings = Settings.builder() + .put(CLUSTER_SETTING_REPLICATION_TYPE, clusterLevelReplication) + .put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true) + .build(); + internalCluster().startClusterManagerOnlyNode(nodeSettings); + internalCluster().startDataOnlyNode(nodeSettings); + internalCluster().startDataOnlyNode(nodeSettings); + logger.info( + "--> Create index with index level replication {} and cluster level replication {}", + indexLevelReplication, + clusterLevelReplication + ); + + // Define resize action and target shard count. 
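+ // Each tuple pairs a resize type with its target primary shard count: SPLIT doubles the initial count, SHRINK collapses to SHARD_COUNT, and CLONE keeps the count unchanged.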
+ List<Tuple<ResizeType, Integer>> resizeActionsList = new ArrayList<>(); + final int initialShardCount = 2; + resizeActionsList.add(new Tuple<>(ResizeType.SPLIT, 2 * initialShardCount)); + resizeActionsList.add(new Tuple<>(ResizeType.SHRINK, SHARD_COUNT)); + resizeActionsList.add(new Tuple<>(ResizeType.CLONE, initialShardCount)); + + Tuple<ResizeType, Integer> resizeActionTuple = resizeActionsList.get(random().nextInt(resizeActionsList.size())); + final String targetIndexName = resizeActionTuple.v1().name().toLowerCase(Locale.ROOT) + "-target"; + + logger.info("--> Performing resize action {} with shard count {}", resizeActionTuple.v1(), resizeActionTuple.v2()); + + Settings indexSettings = Settings.builder() + .put(indexSettings()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, initialShardCount) + .put(SETTING_REPLICATION_TYPE, clusterLevelReplication) + .build(); + createIndex(INDEX_NAME, indexSettings); + + // Block writes + client().admin().indices().prepareUpdateSettings(INDEX_NAME).setSettings(Settings.builder().put("index.blocks.write", true)).get(); + ensureGreen(); + + // Validate resize action fails + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> client().admin() + .indices() + .prepareResizeIndex(INDEX_NAME, targetIndexName) + .setResizeType(resizeActionTuple.v1()) + .setSettings( + Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", resizeActionTuple.v2()) + .putNull("index.blocks.write") + .put(SETTING_REPLICATION_TYPE, indexLevelReplication) + .build() + ) + .get() + ); + assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getMessage()); + } + + /** + * Generate a list of ReplicationType with random ordering + * + * @return List of ReplicationType values + */ + private List<ReplicationType> getRandomReplicationTypesAsList() { + List<ReplicationType> replicationStrategies = List.of(ReplicationType.SEGMENT, ReplicationType.DOCUMENT); + int randomReplicationIndex = random().nextInt(replicationStrategies.size()); + ReplicationType clusterLevelReplication = replicationStrategies.get(randomReplicationIndex); + ReplicationType indexLevelReplication = replicationStrategies.get(1 - randomReplicationIndex); + return List.of(clusterLevelReplication, indexLevelReplication); + } } diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java index c2ce7e48f92d2..2c12c0abb202b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java @@ -31,6 +31,7 @@ import java.util.concurrent.TimeUnit; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; +import static org.opensearch.indices.IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_SETTING_REPLICATION_TYPE; import static org.opensearch.indices.replication.SegmentReplicationBaseIT.waitForSearchableDocs; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @@ -47,6 +48,9 @@ public class SegmentReplicationSnapshotIT extends AbstractSnapshotIntegTestCase private static final String REPOSITORY_NAME = "test-segrep-repo"; private static final String SNAPSHOT_NAME = "test-segrep-snapshot"; + protected static final String 
REPLICATION_MISMATCH_VALIDATION_ERROR = + "Validation Failed: 1: index setting [index.replication.type] is not allowed to be set as [cluster.index.restrict.replication.type=true];"; + public Settings segRepEnableIndexSettings() { return getShardSettings().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build(); } @@ -306,4 +310,63 @@ public void testSnapshotRestoreOnIndexWithSegRepClusterSetting() throws Exceptio IndicesService indicesService = internalCluster().getInstance(IndicesService.class); assertEquals(indicesService.indexService(index).getIndexSettings().isSegRepEnabled(), false); } + + /** + * 1. Create index in DOCUMENT replication type + * 2. Snapshot index + * 3. Add new set of nodes with `cluster.indices.replication.strategy` set to SEGMENT and `cluster.index.restrict.replication.type` + * set to true. + * 4. Perform restore on new set of nodes and validate that the restore is rejected because the index carries `DOCUMENT` replication. + */ + public void testSnapshotRestoreOnRestrictReplicationSetting() throws Exception { + final int documentCount = scaledRandomIntBetween(1, 10); + String originalClusterManagerNode = internalCluster().startClusterManagerOnlyNode(); + + // Starting two nodes with primary and replica shards respectively. + final String primaryNode = internalCluster().startDataOnlyNode(); + prepareCreate( + INDEX_NAME, + Settings.builder() + // we want to override cluster replication setting by passing an index replication setting + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, REPLICA_COUNT) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, SHARD_COUNT) + ).get(); + ensureYellowAndNoInitializingShards(INDEX_NAME); + final String replicaNode = internalCluster().startDataOnlyNode(); + ensureGreen(INDEX_NAME); + + for (int i = 0; i < documentCount; i++) { + client().prepareIndex(INDEX_NAME).setId(String.valueOf(i)).setSource("foo", "bar").get(); + } + + createSnapshot(); + + // Delete index + assertAcked(client().admin().indices().delete(new DeleteIndexRequest(INDEX_NAME)).get()); + assertFalse("index [" + INDEX_NAME + "] should have been deleted", indexExists(INDEX_NAME)); + + // Start new set of nodes with cluster level replication type setting and restrict replication type setting.
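+ // The snapshot's index metadata still carries replication.type=DOCUMENT, so restoring it into a cluster that enforces SEGMENT is expected to fail validation.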
+ Settings settings = Settings.builder() + .put(CLUSTER_SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true) + .build(); + + // Start new cluster manager node + String newClusterManagerNode = internalCluster().startClusterManagerOnlyNode(settings); + + // Remove older nodes + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(originalClusterManagerNode)); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNode)); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode)); + + String newPrimaryNode = internalCluster().startDataOnlyNode(settings); + String newReplicaNode = internalCluster().startDataOnlyNode(settings); + + // Perform snapshot restore + logger.info("--> Performing snapshot restore to target index"); + + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> restoreSnapshotWithSettings(null)); + assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getMessage()); + } } diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index bb1bf94f5e984..3384393d8feaf 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -907,6 +907,10 @@ static Settings aggregateIndexSettings( ); } + List<String> validationErrors = new ArrayList<>(); + validateIndexReplicationTypeSettings(indexSettingsBuilder.build(), clusterSettings).ifPresent(validationErrors::add); + validateErrors(request.index(), validationErrors); + Settings indexSettings = indexSettingsBuilder.build(); /* * We can not validate settings until we have applied templates, otherwise we do not know the actual settings @@ -1246,7 +1250,11 @@ private void validate(CreateIndexClusterStateUpdateRequest request, ClusterState public void validateIndexSettings(String indexName, final Settings settings, final boolean forbidPrivateIndexSettings) throws IndexCreationException { List<String> validationErrors = getIndexSettingsValidationErrors(settings, forbidPrivateIndexSettings, indexName); + validateIndexReplicationTypeSettings(settings, clusterService.getClusterSettings()).ifPresent(validationErrors::add); + validateErrors(indexName, validationErrors); + } + private static void validateErrors(String indexName, List<String> validationErrors) { if (validationErrors.isEmpty() == false) { ValidationException validationException = new ValidationException(); validationException.addValidationErrors(validationErrors); @@ -1322,6 +1330,27 @@ private static List<String> validateIndexCustomPath(Settings settings, @Nullable return validationErrors; } + /** + * Validates that {@code index.replication.type} matches the cluster level setting {@code cluster.indices.replication.strategy} + * when {@code cluster.index.restrict.replication.type} is set to true.
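+ * Returns a non-empty {@link Optional} containing the validation error when the two settings conflict, and an empty {@link Optional} otherwise.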
+ * + * @param requestSettings settings passed in during index create request + * @param clusterSettings cluster setting + */ + private static Optional<String> validateIndexReplicationTypeSettings(Settings requestSettings, ClusterSettings clusterSettings) { + if (clusterSettings.get(IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING) + && requestSettings.hasValue(SETTING_REPLICATION_TYPE) + && requestSettings.get(INDEX_REPLICATION_TYPE_SETTING.getKey()) + .equals(clusterSettings.get(CLUSTER_REPLICATION_TYPE_SETTING).name()) == false) { + return Optional.of( + "index setting [index.replication.type] is not allowed to be set as [" + + IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey() + + "=true]" + ); + } + return Optional.empty(); + } + /** * Validates the settings and mappings for shrinking an index. * diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index ab0ea89f4734d..fa4b0f475edc5 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -701,7 +701,8 @@ public void apply(Settings value, Settings current, Settings previous) { AdmissionControlSettings.ADMISSION_CONTROL_TRANSPORT_LAYER_MODE, CpuBasedAdmissionControllerSettings.CPU_BASED_ADMISSION_CONTROLLER_TRANSPORT_LAYER_MODE, CpuBasedAdmissionControllerSettings.INDEXING_CPU_USAGE_LIMIT, - CpuBasedAdmissionControllerSettings.SEARCH_CPU_USAGE_LIMIT + CpuBasedAdmissionControllerSettings.SEARCH_CPU_USAGE_LIMIT, + IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING ) ) ); diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 3d1794f8d3197..5c3beaf8509bd 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -301,6 +301,19 @@ public class IndicesService extends AbstractLifecycleComponent Property.Final ); + /** + * If enabled, this setting enforces that indexes will be created with a replication type matching the cluster setting + * defined in cluster.indices.replication.strategy by rejecting any request that specifies a replication type that + * does not match the cluster setting. If disabled, a user can choose a replication type on a per-index basis using + * the index.replication.type setting. + */ + public static final Setting<Boolean> CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING = Setting.boolSetting( + "cluster.index.restrict.replication.type", + false, + Property.NodeScope, + Property.Final + ); + /** * The node's settings. 
*/ diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index c4a782209421b..cea151748bfb6 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -71,6 +71,7 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.translog.Translog; +import org.opensearch.indices.IndexCreationException; import org.opensearch.indices.IndicesService; import org.opensearch.indices.InvalidAliasNameException; import org.opensearch.indices.InvalidIndexNameException; @@ -137,6 +138,7 @@ import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.opensearch.index.IndexSettings.INDEX_TRANSLOG_DURABILITY_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_DEFAULT_INDEX_REFRESH_INTERVAL_SETTING; +import static org.opensearch.indices.IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_MINIMUM_INDEX_REFRESH_INTERVAL_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_INDEX_RESTRICT_ASYNC_DURABILITY_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; @@ -166,6 +168,9 @@ public class MetadataCreateIndexServiceTests extends OpenSearchTestCase { private static final String translogRepositoryNameAttributeKey = NODE_ATTRIBUTES.getKey() + REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; + final String REPLICATION_MISMATCH_VALIDATION_ERROR = + "Validation Failed: 1: index setting [index.replication.type] is not allowed to be set as [cluster.index.restrict.replication.type=true];"; + @Before public void setup() throws Exception { super.setUp(); @@ -1239,6 +1244,105 @@ public void testIndexTemplateReplicationType() { assertEquals(ReplicationType.SEGMENT.toString(), indexSettings.get(INDEX_REPLICATION_TYPE_SETTING.getKey())); } + public void testClusterForceReplicationTypeInAggregateSettings() { + Settings settings = Settings.builder() + .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) + .put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true) + .build(); + ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + Settings nonMatchingReplicationIndexSettings = Settings.builder() + .put(INDEX_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.DOCUMENT) + .build(); + request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); + request.settings(nonMatchingReplicationIndexSettings); + IndexCreationException exception = expectThrows( + IndexCreationException.class, + () -> aggregateIndexSettings( + ClusterState.EMPTY_STATE, + request, + Settings.EMPTY, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Collections.emptySet(), + clusterSettings + ) + ); + assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getCause().getMessage()); + + Settings matchingReplicationIndexSettings = Settings.builder() + .put(INDEX_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) + .build(); + request.settings(matchingReplicationIndexSettings); + Settings aggregateIndexSettings = aggregateIndexSettings( + 
ClusterState.EMPTY_STATE, + request, + Settings.EMPTY, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Collections.emptySet(), + clusterSettings + ); + assertEquals(ReplicationType.SEGMENT.toString(), aggregateIndexSettings.get(INDEX_REPLICATION_TYPE_SETTING.getKey())); + } + + public void testClusterForceReplicationTypeInValidateIndexSettings() { + ClusterService clusterService = mock(ClusterService.class); + Metadata metadata = Metadata.builder() + .transientSettings(Settings.builder().put(Metadata.DEFAULT_REPLICA_COUNT_SETTING.getKey(), 1).build()) + .build(); + ClusterState clusterState = ClusterState.builder(org.opensearch.cluster.ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY)) + .metadata(metadata) + .build(); + ThreadPool threadPool = new TestThreadPool(getTestName()); + // Enforce cluster level replication type setting + final Settings forceClusterSettingEnabled = Settings.builder() + .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) + .put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), true) + .build(); + ClusterSettings clusterSettings = new ClusterSettings(forceClusterSettingEnabled, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + when(clusterService.getSettings()).thenReturn(forceClusterSettingEnabled); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + when(clusterService.state()).thenReturn(clusterState); + + final MetadataCreateIndexService checkerService = new MetadataCreateIndexService( + forceClusterSettingEnabled, + clusterService, + null, + null, + null, + createTestShardLimitService(randomIntBetween(1, 1000), false, clusterService), + new Environment(Settings.builder().put("path.home", "dummy").build(), null), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + threadPool, + null, + new SystemIndices(Collections.emptyMap()), + true, + new AwarenessReplicaBalance(forceClusterSettingEnabled, clusterService.getClusterSettings()) + ); + // Use DOCUMENT replication type setting for index creation + final Settings indexSettings = Settings.builder().put(INDEX_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.DOCUMENT).build(); + + IndexCreationException exception = expectThrows( + IndexCreationException.class, + () -> checkerService.validateIndexSettings("test", indexSettings, false) + ); + assertEquals(REPLICATION_MISMATCH_VALIDATION_ERROR, exception.getCause().getMessage()); + + // Cluster level replication type setting not enforced + final Settings forceClusterSettingDisabled = Settings.builder() + .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.SEGMENT) + .put(CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING.getKey(), false) + .build(); + clusterSettings = new ClusterSettings(forceClusterSettingDisabled, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + when(clusterService.getClusterSettings()).thenReturn(clusterSettings); + checkerService.validateIndexSettings("test", indexSettings, false); + threadPool.shutdown(); + } + public void testRemoteStoreNoUserOverrideExceptReplicationTypeSegmentIndexSettings() { Settings settings = Settings.builder() .put(CLUSTER_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.DOCUMENT) From 02beac28363ef43f2e4437496e0158ec035e4f63 Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Thu, 21 Dec 2023 15:34:12 -0600 Subject: [PATCH 028/178] Fix CHANGELOG entries that were added to wrong section (#11662) Signed-off-by: Andrew Ross <andrross@amazon.com> --- CHANGELOG.md | 20 
++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ca10ecf57e2b6..3c4a6f198e286 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,14 +9,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add getter for path field in NestedQueryBuilder ([#4636](https://github.com/opensearch-project/OpenSearch/pull/4636)) - Allow mmap to use new JDK-19 preview APIs in Apache Lucene 9.4+ ([#5151](https://github.com/opensearch-project/OpenSearch/pull/5151)) - Add events correlation engine plugin ([#6854](https://github.com/opensearch-project/OpenSearch/issues/6854)) -- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107)) - Implement on behalf of token passing for extensions ([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679), [#10664](https://github.com/opensearch-project/OpenSearch/pull/10664)) - Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618)) - [AdmissionControl] Added changes for AdmissionControl Interceptor and AdmissionControlService for RateLimiting ([#9286](https://github.com/opensearch-project/OpenSearch/pull/9286)) - GHA to verify checklist items completion in PR descriptions ([#10800](https://github.com/opensearch-project/OpenSearch/pull/10800)) - Allow to pass the list settings through environment variables (like [], ["a", "b", "c"], ...) ([#10625](https://github.com/opensearch-project/OpenSearch/pull/10625)) - [Admission Control] Integrate CPU AC with ResourceUsageCollector and add CPU AC stats to nodes/stats ([#10887](https://github.com/opensearch-project/OpenSearch/pull/10887)) -- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378)) ### Dependencies - Bump `log4j-core` from 2.18.0 to 2.19.0 @@ -46,7 +44,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.bouncycastle:bcmail-jdk15on` to `org.bouncycastle:bcmail-jdk15to18` version 1.75 ([#8247](https://github.com/opensearch-project/OpenSearch/pull/8247)) - Bump `org.bouncycastle:bcpkix-jdk15on` to `org.bouncycastle:bcpkix-jdk15to18` version 1.75 ([#8247](https://github.com/opensearch-project/OpenSearch/pull/8247)) - Bump JNA version from 5.5 to 5.13 ([#9963](https://github.com/opensearch-project/OpenSearch/pull/9963)) -- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) - Bump `org.eclipse.jgit` from 6.5.0 to 6.7.0 ([#10147](https://github.com/opensearch-project/OpenSearch/pull/10147)) - Bump OpenTelemetry from 1.30.1 to 1.31.0 ([#10617](https://github.com/opensearch-project/OpenSearch/pull/10617)) - Bump OpenTelemetry from 1.31.0 to 1.32.0 and OpenTelemetry Semconv from 1.21.0-alpha to 1.23.1-alpha ([#11305](https://github.com/opensearch-project/OpenSearch/pull/11305)) @@ -59,7 +56,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Improve summary error message for invalid setting updates ([#4792](https://github.com/opensearch-project/OpenSearch/pull/4792)) - Return 409 Conflict HTTP status instead of 503 on failure to concurrently execute snapshots ([#8986](https://github.com/opensearch-project/OpenSearch/pull/5855)) - Add task completion count in search backpressure stats API 
([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/)) -- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558)) - Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894)) @@ -85,8 +81,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix 'org.apache.hc.core5.http.ParseException: Invalid protocol version' under JDK 16+ ([#4827](https://github.com/opensearch-project/OpenSearch/pull/4827)) - Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944)) - Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993)) -- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873)) -- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249)) ### Security @@ -120,11 +114,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317)) - Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069)) - [BWC and API enforcement] Introduce checks for enforcing the API restrictions ([#11175](https://github.com/opensearch-project/OpenSearch/pull/11175)) +- Maintainer approval check ([#11378](https://github.com/opensearch-project/OpenSearch/pull/11378)) - Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170)) - Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) - Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) ### Dependencies +- Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) - Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554)) - Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298)) @@ -160,20 +156,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) - Force merge with `only_expunge_deletes` honors max segment size 
([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036)) - Add the means to extract the contextual properties from HttpChannel, TcpChannel and TransportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562)) +- Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107)) - Search pipelines now support asynchronous request and response processors to avoid blocking on a transport thread ([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598)) - [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524)) - [BUG] Fix java.lang.SecurityException in repository-gcs plugin ([#10642](https://github.com/opensearch-project/OpenSearch/pull/10642)) - Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395)) +- Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558)) - Add instrumentation for indexing in transport bulk action and transport shard bulk action. ([#10273](https://github.com/opensearch-project/OpenSearch/pull/10273)) - Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895)) -- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023)) - Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057)) +- Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023)) - Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083)) - Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087)) - Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528)) +- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312)) - Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308)) - Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362)) -- Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312)) - Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512)) - Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) @@ -188,14 +186,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([10370](https://github.com/opensearch-project/OpenSearch/pull/10370)) - Fix some test methods in SimulatePipelineRequestParsingTests that never run and fix test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496)) - Fix passing wrong
parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737)) -- Fix SuggestSearch.testSkipDuplicates by forceing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068)) - Delegating CachingWeightWrapper#count to internal weight object ([#10543](https://github.com/opensearch-project/OpenSearch/pull/10543)) - Fix per request latency last phase not tracked ([#10934](https://github.com/opensearch-project/OpenSearch/pull/10934)) -- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) +- Fix SuggestSearch.testSkipDuplicates by forcing refresh when indexing its test documents ([#11068](https://github.com/opensearch-project/OpenSearch/pull/11068)) +- [BUG] Fix the thread context that is not properly cleared and messes up the traces ([#10873](https://github.com/opensearch-project/OpenSearch/pull/10873)) +- Handle canMatchSearchAfter for frozen context scenario ([#11249](https://github.com/opensearch-project/OpenSearch/pull/11249)) - Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316)) - Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369)) - Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167)) - Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238)) +- Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) - Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) From a3524b3351a6261fdeb5610f7eac02a7d327247a Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Thu, 21 Dec 2023 15:44:39 -0600 Subject: [PATCH 029/178] Remove unused java11 gradle configuration (#11661) The java11-specific sources were removed back in #2898 and as far as I can tell this configuration has been unneeded since that point. 
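Since the deleted jar block was what stamped the 'Multi-Release: true' attribute into the server jar's manifest and packed the META-INF/versions/11 entries, the cleanup can be verified against the built artifact. A minimal sketch, assuming the jar path is passed as the first command-line argument (the class name and invocation are illustrative, not part of this change):

    import java.util.jar.Attributes;
    import java.util.jar.JarFile;
    import java.util.jar.Manifest;

    // Reports whether a jar still declares Multi-Release and ships versioned entries.
    public class MultiReleaseJarCheck {
        public static void main(String[] args) throws Exception {
            try (JarFile jar = new JarFile(args[0])) { // e.g. the built server jar
                Manifest manifest = jar.getManifest();
                String multiRelease = manifest == null
                    ? null
                    : manifest.getMainAttributes().getValue(new Attributes.Name("Multi-Release"));
                boolean versionedEntries = jar.stream()
                    .anyMatch(e -> e.getName().startsWith("META-INF/versions/"));
                System.out.println("Multi-Release attribute: " + multiRelease);
                System.out.println("META-INF/versions entries: " + versionedEntries);
            }
        }
    }

After this change both checks should report null and false for the server jar.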
Signed-off-by: Andrew Ross <andrross@amazon.com> --- server/build.gradle | 39 --------------------------------------- 1 file changed, 39 deletions(-) diff --git a/server/build.gradle b/server/build.gradle index bf0c7791aeb55..e36498bf1038b 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -57,45 +57,6 @@ sourceSets { } } } -// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 11 so we do not include this source set in our IDEs -if (!isEclipse) { - sourceSets { - java11 { - java { - srcDirs = ['src/main/java11'] - } - } - } - - configurations { - java11Implementation.extendsFrom(api) - } - - dependencies { - java11Implementation sourceSets.main.output - } - - compileJava11Java { - sourceCompatibility = JavaVersion.VERSION_11 - targetCompatibility = JavaVersion.VERSION_11 - } - - tasks.named('forbiddenApisJava11').configure { - doFirst { - if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11 - } - } - } - - jar { - metaInf { - into 'versions/11' - from sourceSets.java11.output - } - manifest.attributes('Multi-Release': 'true') - } -} dependencies { From fe46dde351835f34f59c5e357e0a3b71d9a31f18 Mon Sep 17 00:00:00 2001 From: Taeik Lim <sibera21@gmail.com> Date: Fri, 22 Dec 2023 07:15:46 +0900 Subject: [PATCH 030/178] Change visibility of action constructor (public -> private) (#11659) - MainAction - RemoteInfoAction Signed-off-by: Taeik Lim <sibera21@gmail.com> --- .../action/admin/cluster/remote/RemoteInfoAction.java | 2 +- server/src/main/java/org/opensearch/action/main/MainAction.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/remote/RemoteInfoAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/remote/RemoteInfoAction.java index 55f75a142a53c..7e4911c10c50e 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/remote/RemoteInfoAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/remote/RemoteInfoAction.java @@ -44,7 +44,7 @@ public final class RemoteInfoAction extends ActionType<RemoteInfoResponse> { public static final String NAME = "cluster:monitor/remote/info"; public static final RemoteInfoAction INSTANCE = new RemoteInfoAction(); - public RemoteInfoAction() { + private RemoteInfoAction() { super(NAME, RemoteInfoResponse::new); } } diff --git a/server/src/main/java/org/opensearch/action/main/MainAction.java b/server/src/main/java/org/opensearch/action/main/MainAction.java index c5cbac824ec83..28a31a92d7f16 100644 --- a/server/src/main/java/org/opensearch/action/main/MainAction.java +++ b/server/src/main/java/org/opensearch/action/main/MainAction.java @@ -44,7 +44,7 @@ public class MainAction extends ActionType<MainResponse> { public static final String NAME = "cluster:monitor/main"; public static final MainAction INSTANCE = new MainAction(); - public MainAction() { + private MainAction() { super(NAME, MainResponse::new); } } From f92f846a1f9b30a055dde846fd12d987a511723a Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Thu, 21 Dec 2023 18:13:52 -0600 Subject: [PATCH 031/178] Remove unused java11 gradle configuration from lib/core (#11665) This is the same unused java11 configuration that was removed from the server module in #11661. 
Signed-off-by: Andrew Ross <andrross@amazon.com> --- libs/core/build.gradle | 39 --------------------------------------- 1 file changed, 39 deletions(-) diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 4850b5aea5c85..0cf2cd0bf92b6 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -36,45 +36,6 @@ base { archivesName = 'opensearch-core' } -// we want to keep the JDKs in our IDEs set to JDK 8 until minimum JDK is bumped to 11 so we do not include this source set in our IDEs -if (!isEclipse) { - sourceSets { - java11 { - java { - srcDirs = ['src/main/java11'] - } - } - } - - configurations { - java11Compile.extendsFrom(compile) - } - - dependencies { - java11Implementation sourceSets.main.output - } - - compileJava11Java { - sourceCompatibility = JavaVersion.VERSION_11 - targetCompatibility = JavaVersion.VERSION_11 - } - - forbiddenApisJava11 { - if (BuildParams.runtimeJavaVersion < JavaVersion.VERSION_11) { - targetCompatibility = JavaVersion.VERSION_11 - } - replaceSignatureFiles 'jdk-signatures' - } - - jar { - metaInf { - into 'versions/11' - from sourceSets.java11.output - } - manifest.attributes('Multi-Release': 'true') - } -} - dependencies { api project(':libs:opensearch-common') From f7d291456810603f259a243aee98188be5c7070a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Dec 2023 17:44:16 -0500 Subject: [PATCH 032/178] Bump com.netflix.nebula:nebula-publishing-plugin from 20.3.0 to 21.0.0 (#11671) * Bump com.netflix.nebula:nebula-publishing-plugin from 20.3.0 to 21.0.0 Bumps [com.netflix.nebula:nebula-publishing-plugin](https://github.com/nebula-plugins/nebula-publishing-plugin) from 20.3.0 to 21.0.0. - [Release notes](https://github.com/nebula-plugins/nebula-publishing-plugin/releases) - [Changelog](https://github.com/nebula-plugins/nebula-publishing-plugin/blob/main/CHANGELOG.md) - [Commits](https://github.com/nebula-plugins/nebula-publishing-plugin/compare/v20.3.0...v21.0.0) --- updated-dependencies: - dependency-name: com.netflix.nebula:nebula-publishing-plugin dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + buildSrc/build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c4a6f198e286..dcd645065b488 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -151,6 +151,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.wiremock:wiremock-standalone` from 3.1.0 to 3.3.1 ([#11555](https://github.com/opensearch-project/OpenSearch/pull/11555)) - Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556)) - Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557)) +- Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 0efa170250a7b..40ab0801223b1 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -106,7 +106,7 @@ dependencies { api 'org.apache.commons:commons-compress:1.25.0' api 'org.apache.ant:ant:1.10.14' api 'com.netflix.nebula:gradle-extra-configurations-plugin:10.0.0' - api 'com.netflix.nebula:nebula-publishing-plugin:20.3.0' + api 'com.netflix.nebula:nebula-publishing-plugin:21.0.0' api 'com.netflix.nebula:gradle-info-plugin:12.1.6' api 'org.apache.rat:apache-rat:0.15' api 'commons-io:commons-io:2.15.1' From bb4602daa82e7ebfc8b4fba602dd7d36e5e1c241 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Dec 2023 09:32:38 -0500 Subject: [PATCH 033/178] Bump commons-io:commons-io from 2.15.0 to 2.15.1 in /plugins/ingest-attachment (#11560) * Bump commons-io:commons-io in /plugins/ingest-attachment Bumps commons-io:commons-io from 2.15.0 to 2.15.1. --- updated-dependencies: - dependency-name: commons-io:commons-io dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- plugins/ingest-attachment/build.gradle | 2 +- plugins/ingest-attachment/licenses/commons-io-2.15.0.jar.sha1 | 1 - plugins/ingest-attachment/licenses/commons-io-2.15.1.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 plugins/ingest-attachment/licenses/commons-io-2.15.0.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/commons-io-2.15.1.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index dcd645065b488..c0421f0c1d8fe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -122,7 +122,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) -- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554)) +- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560)) - Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298)) - Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630)) - Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506)) diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 57a2493053956..22db73ad86796 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -57,7 +57,7 @@ dependencies { runtimeOnly "com.google.guava:guava:${versions.guava}" // Other dependencies api 'org.tukaani:xz:1.9' - api 'commons-io:commons-io:2.15.0' + api 'commons-io:commons-io:2.15.1' api "org.slf4j:slf4j-api:${versions.slf4j}" // character set detection diff --git a/plugins/ingest-attachment/licenses/commons-io-2.15.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.15.0.jar.sha1 deleted file mode 100644 index 73709383fd130..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-io-2.15.0.jar.sha1 +++ /dev/null @@ -1 +0,0 
@@ -5c3c2db10f6f797430a7f9c696b4d1273768c924 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-io-2.15.1.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.15.1.jar.sha1 new file mode 100644 index 0000000000000..47c5d13812a36 --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-io-2.15.1.jar.sha1 @@ -0,0 +1 @@ +f11560da189ab563a5c8e351941415430e9304ea \ No newline at end of file From b9022c5dcf6fdc8dc538bcfbb8fd7889982c25c1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Dec 2023 10:52:01 -0500 Subject: [PATCH 034/178] Bump commons-cli:commons-cli from 1.5.0 to 1.6.0 in /plugins/repository-hdfs (#10996) * Bump commons-cli:commons-cli in /plugins/repository-hdfs Bumps commons-cli:commons-cli from 1.5.0 to 1.6.0. --- updated-dependencies: - dependency-name: commons-cli:commons-cli dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-hdfs/licenses/commons-cli-1.5.0.jar.sha1 | 1 - plugins/repository-hdfs/licenses/commons-cli-1.6.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-hdfs/licenses/commons-cli-1.5.0.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/commons-cli-1.6.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index c0421f0c1d8fe..dbb9216f5d40e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -152,6 +152,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.25.0 ([#11556](https://github.com/opensearch-project/OpenSearch/pull/11556)) - Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557)) - Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671)) +- Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index ed1f54888a26f..7736390d58fe1 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -70,7 +70,7 @@ dependencies { api 'com.google.code.gson:gson:2.10.1' runtimeOnly "com.google.guava:guava:${versions.guava}" api "commons-logging:commons-logging:${versions.commonslogging}" - api 'commons-cli:commons-cli:1.5.0' + api 'commons-cli:commons-cli:1.6.0' api "commons-codec:commons-codec:${versions.commonscodec}" api 'commons-collections:commons-collections:3.2.2' api "org.apache.commons:commons-compress:${versions.commonscompress}" diff --git a/plugins/repository-hdfs/licenses/commons-cli-1.5.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-cli-1.5.0.jar.sha1 deleted file mode 100644 index 8f9e064eda2d0..0000000000000 --- 
a/plugins/repository-hdfs/licenses/commons-cli-1.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc98be5d5390230684a092589d70ea76a147925c \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-cli-1.6.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-cli-1.6.0.jar.sha1 new file mode 100644 index 0000000000000..bb94eda6814ea --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-cli-1.6.0.jar.sha1 @@ -0,0 +1 @@ +38166a23afb5bd5520f739b87b3be87f7f0fb96d \ No newline at end of file From 71d7e4daf7dc05c9598d447e4424072e76de79d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 26 Dec 2023 14:43:25 -0500 Subject: [PATCH 035/178] Bump com.maxmind.geoip2:geoip2 from 4.1.0 to 4.2.0 in /modules/ingest-geoip (#11559) * Bump com.maxmind.geoip2:geoip2 in /modules/ingest-geoip Bumps [com.maxmind.geoip2:geoip2](https://github.com/maxmind/GeoIP2-java) from 4.1.0 to 4.2.0. - [Release notes](https://github.com/maxmind/GeoIP2-java/releases) - [Changelog](https://github.com/maxmind/GeoIP2-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/maxmind/GeoIP2-java/compare/v4.1.0...v4.2.0) --- updated-dependencies: - dependency-name: com.maxmind.geoip2:geoip2 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + modules/ingest-geoip/build.gradle | 2 +- modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 | 1 - modules/ingest-geoip/licenses/geoip2-4.2.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 create mode 100644 modules/ingest-geoip/licenses/geoip2-4.2.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index dbb9216f5d40e..2b70e225e715a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -153,6 +153,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `actions/stale` from 8 to 9 ([#11557](https://github.com/opensearch-project/OpenSearch/pull/11557)) - Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671)) - Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996)) +- Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index e126cf37e33a2..45b2ff74a5826 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -39,7 +39,7 @@ opensearchplugin { } dependencies { - api('com.maxmind.geoip2:geoip2:4.1.0') + api('com.maxmind.geoip2:geoip2:4.2.0') // geoip2 dependencies: api('com.maxmind.db:maxmind-db:3.0.0') api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") diff --git a/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 
b/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 deleted file mode 100644 index 0d124299e4cfb..0000000000000 --- a/modules/ingest-geoip/licenses/geoip2-4.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b6b356cc91863409ba3475a148ee11a3a6d6aa4b \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/geoip2-4.2.0.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-4.2.0.jar.sha1 new file mode 100644 index 0000000000000..b6bfeeb9da60b --- /dev/null +++ b/modules/ingest-geoip/licenses/geoip2-4.2.0.jar.sha1 @@ -0,0 +1 @@ +78ff932dc13ac41dd1f0fd9e7405a7f4ad815ce0 \ No newline at end of file From 316b60afde91f3cf7d026b6a53670ef63f09d679 Mon Sep 17 00:00:00 2001 From: Ticheng Lin <51488860+ticheng-aws@users.noreply.github.com> Date: Wed, 27 Dec 2023 14:30:22 -0800 Subject: [PATCH 036/178] Fix flaky testPostFilterDisablesCountOptimization with concurrent search enabled (#11674) Signed-off-by: Ticheng Lin <ticheng@amazon.com> --- .../search/query/QueryProfilePhaseTests.java | 44 ++++++++++++++----- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index 62dcf54e25578..ba1600e6eb651 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -1401,12 +1401,22 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); if (executor != null) { - assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThanOrEqualTo(6L)); - assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThanOrEqualTo(2L)); - assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThanOrEqualTo(6L)); + long maxScore = query.getTimeBreakdown().get("max_score"); + long minScore = query.getTimeBreakdown().get("min_score"); + long avgScore = query.getTimeBreakdown().get("avg_score"); + long maxScoreCount = query.getTimeBreakdown().get("max_score_count"); + long minScoreCount = query.getTimeBreakdown().get("min_score_count"); + long avgScoreCount = query.getTimeBreakdown().get("avg_score_count"); + assertThat(maxScore, greaterThan(0L)); + assertThat(minScore, greaterThan(0L)); + assertThat(avgScore, greaterThan(0L)); + assertThat(maxScore, greaterThanOrEqualTo(avgScore)); + assertThat(avgScore, greaterThanOrEqualTo(minScore)); + assertThat(maxScoreCount, greaterThan(0L)); + assertThat(minScoreCount, greaterThan(0L)); + assertThat(avgScoreCount, greaterThan(0L)); + assertThat(maxScoreCount, greaterThanOrEqualTo(avgScoreCount)); + assertThat(avgScoreCount, greaterThanOrEqualTo(minScoreCount)); } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); @@ -1436,12 +1446,22 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); if (executor != null) { - 
assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThanOrEqualTo(6L)); - assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThanOrEqualTo(2L)); - assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThanOrEqualTo(6L)); + long maxScore = query.getTimeBreakdown().get("max_score"); + long minScore = query.getTimeBreakdown().get("min_score"); + long avgScore = query.getTimeBreakdown().get("avg_score"); + long maxScoreCount = query.getTimeBreakdown().get("max_score_count"); + long minScoreCount = query.getTimeBreakdown().get("min_score_count"); + long avgScoreCount = query.getTimeBreakdown().get("avg_score_count"); + assertThat(maxScore, greaterThan(0L)); + assertThat(minScore, greaterThan(0L)); + assertThat(avgScore, greaterThan(0L)); + assertThat(maxScore, greaterThanOrEqualTo(avgScore)); + assertThat(avgScore, greaterThanOrEqualTo(minScore)); + assertThat(maxScoreCount, greaterThan(0L)); + assertThat(minScoreCount, greaterThan(0L)); + assertThat(avgScoreCount, greaterThan(0L)); + assertThat(maxScoreCount, greaterThanOrEqualTo(avgScoreCount)); + assertThat(avgScoreCount, greaterThanOrEqualTo(minScoreCount)); } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); From 60b2265d9390f27f80803f16eb4d1c5cdc0f4947 Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Thu, 28 Dec 2023 22:11:09 +0800 Subject: [PATCH 037/178] Update supported version for max_shard_size parameter in Shrink API (#11439) * Update supported version for max_shard_size parameter in Shrink API Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Modify changelog Signed-off-by: Gao Binlong <gbinlong@amazon.com> --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 1 + .../rest-api-spec/test/indices.shrink/40_max_shard_size.yml | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b70e225e715a..79fd7d04eadae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -81,6 +81,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix 'org.apache.hc.core5.http.ParseException: Invalid protocol version' under JDK 16+ ([#4827](https://github.com/opensearch-project/OpenSearch/pull/4827)) - Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944)) - Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993)) +- Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439)) ### Security diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/40_max_shard_size.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/40_max_shard_size.yml index 32ac11097d3dc..bac2898ccea1c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/40_max_shard_size.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.shrink/40_max_shard_size.yml @@ -4,8 +4,8 @@ # number_of_shards for the target index. 
- skip: - version: " - 2.99.99" - reason: "only available in 3.0+" + version: " - 2.4.99" + reason: "max_shard_size was introduced in 2.5.0" features: allowed_warnings - do: From 2dec5dc29bcdb39b15cdb2b1508f36d6ebc13188 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 17:34:39 -0500 Subject: [PATCH 038/178] Bump org.apache.commons:commons-lang3 from 3.13.0 to 3.14.0 in /plugins/repository-hdfs (#11691) * Bump org.apache.commons:commons-lang3 in /plugins/repository-hdfs Bumps org.apache.commons:commons-lang3 from 3.13.0 to 3.14.0. --- updated-dependencies: - dependency-name: org.apache.commons:commons-lang3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 | 1 - plugins/repository-hdfs/licenses/commons-lang3-3.14.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/commons-lang3-3.14.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 79fd7d04eadae..050c0ad96c75f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -155,6 +155,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.netflix.nebula:nebula-publishing-plugin` from 20.3.0 to 21.0.0 ([#11671](https://github.com/opensearch-project/OpenSearch/pull/11671)) - Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996)) - Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559)) +- Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 7736390d58fe1..f04d42a2155d6 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -76,7 +76,7 @@ dependencies { api "org.apache.commons:commons-compress:${versions.commonscompress}" api 'org.apache.commons:commons-configuration2:2.9.0' api 'commons-io:commons-io:2.14.0' - api 'org.apache.commons:commons-lang3:3.13.0' + api 'org.apache.commons:commons-lang3:3.14.0' implementation 'com.google.re2j:re2j:1.7' api 'javax.servlet:servlet-api:2.5' api "org.slf4j:slf4j-api:${versions.slf4j}" diff --git a/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 deleted file mode 100644 index d0c2f2486ee1f..0000000000000 --- a/plugins/repository-hdfs/licenses/commons-lang3-3.13.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b7263237aa89c1f99b327197c41d0669707a462e \ No newline at end of file diff --git 
a/plugins/repository-hdfs/licenses/commons-lang3-3.14.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-lang3-3.14.0.jar.sha1 new file mode 100644 index 0000000000000..d783e07e40902 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-lang3-3.14.0.jar.sha1 @@ -0,0 +1 @@ +1ed471194b02f2c6cb734a0cd6f6f107c673afae \ No newline at end of file From a37acba3a4946a117f65183add6688a9382afa12 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 17:37:40 -0500 Subject: [PATCH 039/178] Bump org.apache.logging.log4j:log4j-core from 2.22.0 to 2.22.1 in /buildSrc/src/testKit/thirdPartyAudit/sample_jars (#11695) * Bump org.apache.logging.log4j:log4j-core Bumps org.apache.logging.log4j:log4j-core from 2.22.0 to 2.22.1. --- updated-dependencies: - dependency-name: org.apache.logging.log4j:log4j-core dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 050c0ad96c75f..a7fee08a7420b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -132,7 +132,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.bouncycastle:bc-fips` from 1.0.2.3 to 1.0.2.4 ([#10297](https://github.com/opensearch-project/OpenSearch/pull/10297)) - Bump `com.google.http-client:google-http-client` from 1.43.2 to 1.43.3 ([#10635](https://github.com/opensearch-project/OpenSearch/pull/10635)) - Bump `com.squareup.okio:okio` from 3.5.0 to 3.7.0 ([#10637](https://github.com/opensearch-project/OpenSearch/pull/10637), [#11632](https://github.com/opensearch-project/OpenSearch/pull/11632)) -- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.0 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270)) +- Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.1 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270), [#11695](https://github.com/opensearch-project/OpenSearch/pull/11695)) - Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504)) - Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171)) - Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271)) diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle index f24b61ef0d165..351b42e5bc921 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle @@ -17,7 +17,7 @@ repositories { } dependencies { - implementation 
"org.apache.logging.log4j:log4j-core:2.22.0" + implementation "org.apache.logging.log4j:log4j-core:2.22.1" } ["0.0.1", "0.0.2"].forEach { v -> From 5ccd134fb9faf5fe70db6b6f6f274359ddcaabcb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Jan 2024 20:42:29 -0500 Subject: [PATCH 040/178] Bump com.maxmind.db:maxmind-db from 3.0.0 to 3.1.0 in /modules/ingest-geoip (#11693) * Bump com.maxmind.db:maxmind-db in /modules/ingest-geoip Bumps [com.maxmind.db:maxmind-db](https://github.com/maxmind/MaxMind-DB-Reader-java) from 3.0.0 to 3.1.0. - [Release notes](https://github.com/maxmind/MaxMind-DB-Reader-java/releases) - [Changelog](https://github.com/maxmind/MaxMind-DB-Reader-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/maxmind/MaxMind-DB-Reader-java/compare/v3.0.0...v3.1.0) --- updated-dependencies: - dependency-name: com.maxmind.db:maxmind-db dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + modules/ingest-geoip/build.gradle | 2 +- modules/ingest-geoip/licenses/maxmind-db-3.0.0.jar.sha1 | 1 - modules/ingest-geoip/licenses/maxmind-db-3.1.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 modules/ingest-geoip/licenses/maxmind-db-3.0.0.jar.sha1 create mode 100644 modules/ingest-geoip/licenses/maxmind-db-3.1.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index a7fee08a7420b..6a1937c807a25 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -156,6 +156,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `commons-cli:commons-cli` from 1.5.0 to 1.6.0 ([#10996](https://github.com/opensearch-project/OpenSearch/pull/10996)) - Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559)) - Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) +- Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index 45b2ff74a5826..c0ff155ce1038 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -41,7 +41,7 @@ opensearchplugin { dependencies { api('com.maxmind.geoip2:geoip2:4.2.0') // geoip2 dependencies: - api('com.maxmind.db:maxmind-db:3.0.0') + api('com.maxmind.db:maxmind-db:3.1.0') api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}") diff --git a/modules/ingest-geoip/licenses/maxmind-db-3.0.0.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-3.0.0.jar.sha1 deleted file mode 100644 index 89b0c4c49b450..0000000000000 --- a/modules/ingest-geoip/licenses/maxmind-db-3.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -79dcda62168a77caf595f8fda101baa17fef125d \ 
No newline at end of file diff --git a/modules/ingest-geoip/licenses/maxmind-db-3.1.0.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-3.1.0.jar.sha1 new file mode 100644 index 0000000000000..9db7c7319af0b --- /dev/null +++ b/modules/ingest-geoip/licenses/maxmind-db-3.1.0.jar.sha1 @@ -0,0 +1 @@ +2008992ab45d61c7b28a18678b5df82272529da3 \ No newline at end of file From 6a01d2f95b7e98c601949a58b92b91e3e4022e17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 08:07:20 -0500 Subject: [PATCH 041/178] Bump com.avast.gradle:gradle-docker-compose-plugin from 0.17.5 to 0.17.6 (#11692) * Bump com.avast.gradle:gradle-docker-compose-plugin from 0.17.5 to 0.17.6 Bumps [com.avast.gradle:gradle-docker-compose-plugin](https://github.com/avast/gradle-docker-compose-plugin) from 0.17.5 to 0.17.6. - [Release notes](https://github.com/avast/gradle-docker-compose-plugin/releases) - [Commits](https://github.com/avast/gradle-docker-compose-plugin/compare/0.17.5...0.17.6) --- updated-dependencies: - dependency-name: com.avast.gradle:gradle-docker-compose-plugin dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- buildSrc/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6a1937c807a25..579d2695d3125 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -138,7 +138,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271)) - Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273)) - Bump `netty` from 4.1.100.Final to 4.1.101.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294)) -- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.5 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163)) +- Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692)) - Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861)) - Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344)) - Bump `reactor-netty-core` from 1.1.12 to 1.1.13 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350)) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 40ab0801223b1..a42976fef572c 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -115,7 +115,7 @@ dependencies { api 'org.jdom:jdom2:2.0.6.1' api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}" api 'de.thetaphi:forbiddenapis:3.6' - api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.5' + api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6' api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}" api 
'org.apache.maven:maven-model:3.9.6' api 'com.networknt:json-schema-validator:1.0.86' From 63f4f1397f2e47e717f8797eae1e6516360c4052 Mon Sep 17 00:00:00 2001 From: Gaurav Chandani <chngau@amazon.com> Date: Tue, 2 Jan 2024 23:14:16 +0530 Subject: [PATCH 042/178] Batch Async Fetcher class changes (#8742) * Async Fetcher class changes Signed-off-by: Gaurav Chandani <chngau@amazon.com> --- .../TransportIndicesShardStoresAction.java | 6 +- .../opensearch/gateway/AsyncShardFetch.java | 140 +++++++++++++----- .../opensearch/gateway/GatewayAllocator.java | 25 +++- ...ransportNodesListGatewayStartedShards.java | 11 +- .../indices/store/ShardAttributes.java | 59 ++++++++ .../TransportNodesListShardStoreMetadata.java | 10 +- .../gateway/AsyncShardFetchTests.java | 80 ++++++---- .../gateway/PrimaryShardAllocatorTests.java | 6 +- .../gateway/ReplicaShardAllocatorTests.java | 6 +- .../indices/store/ShardAttributesTests.java | 49 ++++++ .../test/gateway/TestGatewayAllocator.java | 12 +- 11 files changed, 319 insertions(+), 85 deletions(-) create mode 100644 server/src/main/java/org/opensearch/indices/store/ShardAttributes.java create mode 100644 server/src/test/java/org/opensearch/indices/store/ShardAttributesTests.java diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java index 41225bc362235..04166c88a00ad 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/shards/TransportIndicesShardStoresAction.java @@ -195,7 +195,7 @@ void start() { } else { for (Tuple<ShardId, String> shard : shards) { InternalAsyncFetch fetch = new InternalAsyncFetch(logger, "shard_stores", shard.v1(), shard.v2(), listShardStoresInfo); - fetch.fetchData(nodes, Collections.<String>emptySet()); + fetch.fetchData(nodes, Collections.emptyMap()); } } } @@ -223,7 +223,7 @@ protected synchronized void processAsyncFetch( List<FailedNodeException> failures, long fetchingRound ) { - fetchResponses.add(new Response(shardId, responses, failures)); + fetchResponses.add(new Response(shardAttributesMap.keySet().iterator().next(), responses, failures)); if (expectedOps.countDown()) { finish(); } @@ -312,7 +312,7 @@ private boolean shardExistsInNode(final NodeGatewayStartedShards response) { } @Override - protected void reroute(ShardId shardId, String reason) { + protected void reroute(String shardId, String reason) { // no-op } diff --git a/server/src/main/java/org/opensearch/gateway/AsyncShardFetch.java b/server/src/main/java/org/opensearch/gateway/AsyncShardFetch.java index d0ade4eb25168..50774f7e0cb1c 100644 --- a/server/src/main/java/org/opensearch/gateway/AsyncShardFetch.java +++ b/server/src/main/java/org/opensearch/gateway/AsyncShardFetch.java @@ -46,6 +46,7 @@ import org.opensearch.core.action.ActionListener; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.indices.store.ShardAttributes; import org.opensearch.transport.ReceiveTimeoutTransportException; import java.util.ArrayList; @@ -54,12 +55,11 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; -import static java.util.Collections.emptySet; -import static 
java.util.Collections.unmodifiableSet; +import static java.util.Collections.emptyMap; +import static java.util.Collections.unmodifiableMap; /** * Allows to asynchronously fetch shard related data from other nodes for allocation, without blocking @@ -69,6 +69,7 @@ * and once the results are back, it makes sure to schedule a reroute to make sure those results will * be taken into account. * + * It comes in two modes: fetching a single shard or fetching a batch of shards. * @opensearch.internal */ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Releasable { @@ -77,18 +78,21 @@ public abstract class AsyncShardFetch<T extends BaseNodeResponse> implements Rel * An action that lists the relevant shard data that needs to be fetched. */ public interface Lister<NodesResponse extends BaseNodesResponse<NodeResponse>, NodeResponse extends BaseNodeResponse> { - void list(ShardId shardId, @Nullable String customDataPath, DiscoveryNode[] nodes, ActionListener<NodesResponse> listener); + void list(Map<ShardId, ShardAttributes> shardAttributesMap, DiscoveryNode[] nodes, ActionListener<NodesResponse> listener); + } protected final Logger logger; protected final String type; - protected final ShardId shardId; - protected final String customDataPath; + protected final Map<ShardId, ShardAttributes> shardAttributesMap; private final Lister<BaseNodesResponse<T>, T> action; private final Map<String, NodeEntry<T>> cache = new HashMap<>(); - private final Set<String> nodesToIgnore = new HashSet<>(); private final AtomicLong round = new AtomicLong(); private boolean closed; + private final String reroutingKey; + private final Map<ShardId, Set<String>> shardToIgnoreNodes = new HashMap<>(); + + private final boolean enableBatchMode; @SuppressWarnings("unchecked") protected AsyncShardFetch( @@ -100,9 +104,36 @@ protected AsyncShardFetch( ) { this.logger = logger; this.type = type; - this.shardId = Objects.requireNonNull(shardId); - this.customDataPath = Objects.requireNonNull(customDataPath); + shardAttributesMap = new HashMap<>(); + shardAttributesMap.put(shardId, new ShardAttributes(shardId, customDataPath)); + this.action = (Lister<BaseNodesResponse<T>, T>) action; + this.reroutingKey = "ShardId=[" + shardId.toString() + "]"; + enableBatchMode = false; + } + + /** + * Added to fetch a batch of shards from nodes + * + * @param logger Logger + * @param type type of action + * @param shardAttributesMap Map of {@link ShardId} to {@link ShardAttributes} describing the shards to fetch + * @param action Transport Action + * @param batchId Single batch id that ties together all shards in the given ShardAttributesMap, for logging and later identification + */ + @SuppressWarnings("unchecked") + protected AsyncShardFetch( + Logger logger, + String type, + Map<ShardId, ShardAttributes> shardAttributesMap, + Lister<? extends BaseNodesResponse<T>, T> action, + String batchId + ) { + this.logger = logger; + this.type = type; + this.shardAttributesMap = shardAttributesMap; this.action = (Lister<BaseNodesResponse<T>, T>) action; + this.reroutingKey = "BatchID=[" + batchId + "]"; + enableBatchMode = true; } @Override @@ -130,11 +161,32 @@ public synchronized int getNumberOfInFlightFetches() { * The ignoreNodes are nodes that are supposed to be ignored for this round, since fetching is async, we need * to keep them around and make sure we add them back when all the responses are fetched and returned.
*/ - public synchronized FetchResult<T> fetchData(DiscoveryNodes nodes, Set<String> ignoreNodes) { + public synchronized FetchResult<T> fetchData(DiscoveryNodes nodes, Map<ShardId, Set<String>> ignoreNodes) { if (closed) { - throw new IllegalStateException(shardId + ": can't fetch data on closed async fetch"); + throw new IllegalStateException(reroutingKey + ": can't fetch data on closed async fetch"); } - nodesToIgnore.addAll(ignoreNodes); + + if (enableBatchMode == false) { + // we will do assertions here on ignoreNodes + if (ignoreNodes.size() > 1) { + throw new IllegalStateException( + "Fetching Shard Data, " + reroutingKey + ": can only have at most one shard for non-batch mode" + ); + } + if (ignoreNodes.size() == 1) { + if (shardAttributesMap.containsKey(ignoreNodes.keySet().iterator().next()) == false) { + throw new IllegalStateException("Shard Id must be same as initialized in AsyncShardFetch. Expecting = " + reroutingKey); + } + } + } + + // add the nodes to ignore to the list of nodes to ignore for each shard + for (Map.Entry<ShardId, Set<String>> ignoreNodesEntry : ignoreNodes.entrySet()) { + Set<String> ignoreNodesSet = shardToIgnoreNodes.getOrDefault(ignoreNodesEntry.getKey(), new HashSet<>()); + ignoreNodesSet.addAll(ignoreNodesEntry.getValue()); + shardToIgnoreNodes.put(ignoreNodesEntry.getKey(), ignoreNodesSet); + } + fillShardCacheWithDataNodes(cache, nodes); List<NodeEntry<T>> nodesToFetch = findNodesToFetch(cache); if (nodesToFetch.isEmpty() == false) { @@ -153,7 +205,7 @@ public synchronized FetchResult<T> fetchData(DiscoveryNodes nodes, Set<String> i // if we are still fetching, return null to indicate it if (hasAnyNodeFetching(cache)) { - return new FetchResult<>(shardId, null, emptySet()); + return new FetchResult<>(null, emptyMap()); } else { // nothing to fetch, yay, build the return value Map<DiscoveryNode, T> fetchData = new HashMap<>(); @@ -177,16 +229,27 @@ public synchronized FetchResult<T> fetchData(DiscoveryNodes nodes, Set<String> i } } } - Set<String> allIgnoreNodes = unmodifiableSet(new HashSet<>(nodesToIgnore)); + + Map<ShardId, Set<String>> allIgnoreNodesMap = unmodifiableMap(new HashMap<>(shardToIgnoreNodes)); // clear the nodes to ignore, we had a successful run in fetching everything we can // we need to try them if another full run is needed - nodesToIgnore.clear(); + shardToIgnoreNodes.clear(); // if at least one node failed, make sure to have a protective reroute // here, just in case this round won't find anything, and we need to retry fetching data - if (failedNodes.isEmpty() == false || allIgnoreNodes.isEmpty() == false) { - reroute(shardId, "nodes failed [" + failedNodes.size() + "], ignored [" + allIgnoreNodes.size() + "]"); + + if (failedNodes.isEmpty() == false + || allIgnoreNodesMap.values().stream().anyMatch(ignoreNodeSet -> ignoreNodeSet.isEmpty() == false)) { + reroute( + reroutingKey, + "nodes failed [" + + failedNodes.size() + + "], ignored [" + + allIgnoreNodesMap.values().stream().mapToInt(Set::size).sum() + + "]" + ); } - return new FetchResult<>(shardId, fetchData, allIgnoreNodes); + + return new FetchResult<>(fetchData, allIgnoreNodesMap); } } @@ -199,10 +262,10 @@ public synchronized FetchResult<T> fetchData(DiscoveryNodes nodes, Set<String> i protected synchronized void processAsyncFetch(List<T> responses, List<FailedNodeException> failures, long fetchingRound) { if (closed) { // we are closed, no need to process this async fetch at all - logger.trace("{} ignoring fetched [{}] results, already closed", shardId, type); + logger.trace("{} ignoring fetched [{}] results, already closed", reroutingKey, type); return; } - logger.trace("{} processing fetched [{}] results", shardId, type); + logger.trace("{} processing fetched [{}] results", reroutingKey, type); if (responses != null) { for (T response : responses) { @@ -212,7 +275,7 @@ protected synchronized void processAsyncFetch(List<T> responses, List<FailedNode assert nodeEntry.getFetchingRound() > fetchingRound : "node entries only replaced by newer rounds"; logger.trace( "{} received response for [{}] from node {} for an older fetching round (expected: {} but was: {})", - shardId, + reroutingKey, nodeEntry.getNodeId(), type, nodeEntry.getFetchingRound(), @@ -221,14 +284,14 @@ } else if (nodeEntry.isFailed()) { logger.trace( "{} node {} has failed for [{}] (failure [{}])", - shardId, + reroutingKey, nodeEntry.getNodeId(), type, nodeEntry.getFailure() ); } else { // if the entry is there, for the right fetching round and not marked as failed already, process it - logger.trace("{} marking {} as done for [{}], result is [{}]", shardId, nodeEntry.getNodeId(), type, response); + logger.trace("{} marking {} as done for [{}], result is [{}]", reroutingKey, nodeEntry.getNodeId(), type, response); nodeEntry.doneFetching(response); } } @@ -236,14 +299,14 @@ protected synchronized void processAsyncFetch(List<T> responses, List<FailedNode } if (failures != null) { for (FailedNodeException failure : failures) { - logger.trace("{} processing failure {} for [{}]", shardId, failure, type); + logger.trace("{} processing failure {} for [{}]", reroutingKey, failure, type); NodeEntry<T> nodeEntry = cache.get(failure.nodeId()); if (nodeEntry != null) { if (nodeEntry.getFetchingRound() != fetchingRound) { assert nodeEntry.getFetchingRound() > fetchingRound : "node entries only replaced by newer rounds"; logger.trace( "{} received failure for [{}] from node {} for an older fetching round (expected: {} but was: {})", - shardId, + reroutingKey, nodeEntry.getNodeId(), type, nodeEntry.getFetchingRound(), @@ -261,7 +324,7 @@ protected synchronized void processAsyncFetch(List<T> responses, List<FailedNode logger.warn( () -> new ParameterizedMessage( "{}: failed to list shard for {} on node [{}]", - shardId, + reroutingKey, type, failure.nodeId() ), @@ -273,13 +336,13 @@ protected synchronized void processAsyncFetch(List<T> responses, List<FailedNode } } } - reroute(shardId, "post_response"); + reroute(reroutingKey, "post_response"); } /** * Implement this in order to schedule another round that causes a call to fetch data. */ - protected abstract void reroute(ShardId shardId, String reason); + protected abstract void reroute(String reroutingKey, String reason); /** * Clear cache for node, ensuring next fetch will fetch a fresh copy.
@@ -334,8 +397,8 @@ private boolean hasAnyNodeFetching(Map<String, NodeEntry<T>> shardCache) { */ // visible for testing void asyncFetch(final DiscoveryNode[] nodes, long fetchingRound) { - logger.trace("{} fetching [{}] from {}", shardId, type, nodes); - action.list(shardId, customDataPath, nodes, new ActionListener<BaseNodesResponse<T>>() { + logger.trace("{} fetching [{}] from {}", reroutingKey, type, nodes); + action.list(shardAttributesMap, nodes, new ActionListener<BaseNodesResponse<T>>() { @Override public void onResponse(BaseNodesResponse<T> response) { processAsyncFetch(response.getNodes(), response.failures(), fetchingRound); @@ -358,14 +421,12 @@ public void onFailure(Exception e) { */ public static class FetchResult<T extends BaseNodeResponse> { - private final ShardId shardId; private final Map<DiscoveryNode, T> data; - private final Set<String> ignoreNodes; + private final Map<ShardId, Set<String>> ignoredShardToNodes; - public FetchResult(ShardId shardId, Map<DiscoveryNode, T> data, Set<String> ignoreNodes) { - this.shardId = shardId; + public FetchResult(Map<DiscoveryNode, T> data, Map<ShardId, Set<String>> ignoreNodes) { this.data = data; - this.ignoreNodes = ignoreNodes; + this.ignoredShardToNodes = ignoreNodes; } /** @@ -389,9 +450,14 @@ public Map<DiscoveryNode, T> getData() { * Process any changes needed to the allocation based on this fetch result. */ public void processAllocation(RoutingAllocation allocation) { - for (String ignoreNode : ignoreNodes) { - allocation.addIgnoreShardForNode(shardId, ignoreNode); + for (Map.Entry<ShardId, Set<String>> entry : ignoredShardToNodes.entrySet()) { + ShardId shardId = entry.getKey(); + Set<String> ignoreNodes = entry.getValue(); + if (ignoreNodes.isEmpty() == false) { + ignoreNodes.forEach(nodeId -> allocation.addIgnoreShardForNode(shardId, nodeId)); + } } + } } diff --git a/server/src/main/java/org/opensearch/gateway/GatewayAllocator.java b/server/src/main/java/org/opensearch/gateway/GatewayAllocator.java index b5a00b1a47523..c8ef9364ebba9 100644 --- a/server/src/main/java/org/opensearch/gateway/GatewayAllocator.java +++ b/server/src/main/java/org/opensearch/gateway/GatewayAllocator.java @@ -56,6 +56,7 @@ import org.opensearch.indices.store.TransportNodesListShardStoreMetadata; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Set; import java.util.Spliterators; @@ -226,7 +227,9 @@ private static void clearCacheForPrimary( AsyncShardFetch<TransportNodesListShardStoreMetadata.NodeStoreFilesMetadata> fetch, RoutingAllocation allocation ) { - ShardRouting primary = allocation.routingNodes().activePrimary(fetch.shardId); + assert fetch.shardAttributesMap.size() == 1 : "expected only one shard"; + ShardId shardId = fetch.shardAttributesMap.keySet().iterator().next(); + ShardRouting primary = allocation.routingNodes().activePrimary(shardId); if (primary != null) { fetch.clearCacheForNode(primary.currentNodeId()); } @@ -254,15 +257,15 @@ class InternalAsyncFetch<T extends BaseNodeResponse> extends AsyncShardFetch<T> } @Override - protected void reroute(ShardId shardId, String reason) { - logger.trace("{} scheduling reroute for {}", shardId, reason); + protected void reroute(String reroutingKey, String reason) { + logger.trace("{} scheduling reroute for {}", reroutingKey, reason); assert rerouteService != null; rerouteService.reroute( "async_shard_fetch", Priority.HIGH, ActionListener.wrap( - r -> logger.trace("{} scheduled reroute completed for {}", shardId, reason), - e -> 
logger.debug(new ParameterizedMessage("{} scheduled reroute failed for {}", shardId, reason), e) + r -> logger.trace("{} scheduled reroute completed for {}", reroutingKey, reason), + e -> logger.debug(new ParameterizedMessage("{} scheduled reroute failed for {}", reroutingKey, reason), e) ) ); } @@ -293,7 +296,11 @@ protected AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.Nod ); AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.NodeGatewayStartedShards> shardState = fetch.fetchData( allocation.nodes(), - allocation.getIgnoreNodes(shard.shardId()) + new HashMap<>() { + { + put(shard.shardId(), allocation.getIgnoreNodes(shard.shardId())); + } + } ); if (shardState.hasData()) { @@ -328,7 +335,11 @@ protected AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetadata.NodeS ); AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetadata.NodeStoreFilesMetadata> shardStores = fetch.fetchData( allocation.nodes(), - allocation.getIgnoreNodes(shard.shardId()) + new HashMap<>() { + { + put(shard.shardId(), allocation.getIgnoreNodes(shard.shardId())); + } + } ); if (shardStores.hasData()) { shardStores.processAllocation(allocation); diff --git a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java index a47893f37a97c..601a5c671d67c 100644 --- a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java @@ -62,12 +62,14 @@ import org.opensearch.index.store.Store; import org.opensearch.indices.IndicesService; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.indices.store.ShardAttributes; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportService; import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.Objects; /** @@ -124,7 +126,14 @@ public TransportNodesListGatewayStartedShards( } @Override - public void list(ShardId shardId, String customDataPath, DiscoveryNode[] nodes, ActionListener<NodesGatewayStartedShards> listener) { + public void list( + Map<ShardId, ShardAttributes> shardAttributesMap, + DiscoveryNode[] nodes, + ActionListener<NodesGatewayStartedShards> listener + ) { + assert shardAttributesMap.size() == 1 : "only one shard should be specified"; + final ShardId shardId = shardAttributesMap.keySet().iterator().next(); + final String customDataPath = shardAttributesMap.get(shardId).getCustomDataPath(); execute(new Request(shardId, customDataPath, nodes), listener); } diff --git a/server/src/main/java/org/opensearch/indices/store/ShardAttributes.java b/server/src/main/java/org/opensearch/indices/store/ShardAttributes.java new file mode 100644 index 0000000000000..4ef4e91f7af8c --- /dev/null +++ b/server/src/main/java/org/opensearch/indices/store/ShardAttributes.java @@ -0,0 +1,59 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.indices.store; + +import org.opensearch.common.Nullable; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.gateway.AsyncShardFetch; + +import java.io.IOException; + +/** + * This class contains information about the shard that needs to be sent as part of request in Transport Action implementing + * {@link AsyncShardFetch.Lister} to fetch shard information in async manner + * + * @opensearch.internal + */ +public class ShardAttributes implements Writeable { + private final ShardId shardId; + @Nullable + private final String customDataPath; + + public ShardAttributes(ShardId shardId, String customDataPath) { + this.shardId = shardId; + this.customDataPath = customDataPath; + } + + public ShardAttributes(StreamInput in) throws IOException { + shardId = new ShardId(in); + customDataPath = in.readString(); + } + + public ShardId getShardId() { + return shardId; + } + + /** + * Returns the custom data path that is used to look up information for this shard. + * Returns an empty string if no custom data path is used for this index. + * Returns null if custom data path information is not available (due to BWC). + */ + @Nullable + public String getCustomDataPath() { + return customDataPath; + } + + public void writeTo(StreamOutput out) throws IOException { + shardId.writeTo(out); + out.writeString(customDataPath); + } +} diff --git a/server/src/main/java/org/opensearch/indices/store/TransportNodesListShardStoreMetadata.java b/server/src/main/java/org/opensearch/indices/store/TransportNodesListShardStoreMetadata.java index 0afeae253ae14..5a3c1038cd5f0 100644 --- a/server/src/main/java/org/opensearch/indices/store/TransportNodesListShardStoreMetadata.java +++ b/server/src/main/java/org/opensearch/indices/store/TransportNodesListShardStoreMetadata.java @@ -73,6 +73,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -125,7 +126,14 @@ public TransportNodesListShardStoreMetadata( } @Override - public void list(ShardId shardId, String customDataPath, DiscoveryNode[] nodes, ActionListener<NodesStoreFilesMetadata> listener) { + public void list( + Map<ShardId, ShardAttributes> shardAttributes, + DiscoveryNode[] nodes, + ActionListener<NodesStoreFilesMetadata> listener + ) { + assert shardAttributes.size() == 1 : "only one shard should be specified"; + final ShardId shardId = shardAttributes.keySet().iterator().next(); + final String customDataPath = shardAttributes.get(shardId).getCustomDataPath(); execute(new Request(shardId, customDataPath, nodes), listener); } diff --git a/server/src/test/java/org/opensearch/gateway/AsyncShardFetchTests.java b/server/src/test/java/org/opensearch/gateway/AsyncShardFetchTests.java index 31a27503069d7..4e5e9c71e1fe4 100644 --- a/server/src/test/java/org/opensearch/gateway/AsyncShardFetchTests.java +++ b/server/src/test/java/org/opensearch/gateway/AsyncShardFetchTests.java @@ -39,6 +39,7 @@ import org.opensearch.cluster.node.DiscoveryNodeRole; import org.opensearch.cluster.node.DiscoveryNodes; import org.opensearch.core.index.shard.ShardId; +import org.opensearch.indices.store.ShardAttributes; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; 
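[Editor's aside] A sketch of the wire round-trip the new Writeable supports, assuming OpenSearch's standard stream helpers (BytesStreamOutput is an assumption here; the ShardAttributesTests added at the end of this patch use DataOutputStreamOutput instead):

    ShardAttributes attrs = new ShardAttributes(new ShardId(new Index("idx", "uuid-1"), 0), "");
    BytesStreamOutput out = new BytesStreamOutput();
    attrs.writeTo(out);                  // writes the ShardId first, then customDataPath
    ShardAttributes copy = new ShardAttributes(out.bytes().streamInput());
    assert copy.getShardId().equals(attrs.getShardId());

Note that writeTo() uses writeString() rather than writeOptionalString(), so despite the @Nullable getter a null customDataPath cannot actually cross the wire; the call sites in this patch pass "" instead.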
@@ -46,12 +47,13 @@ import org.junit.Before; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicInteger; -import static java.util.Collections.emptySet; +import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; @@ -84,7 +86,16 @@ public class AsyncShardFetchTests extends OpenSearchTestCase { public void setUp() throws Exception { super.setUp(); this.threadPool = new TestThreadPool(getTestName()); - this.test = new TestFetch(threadPool); + if (randomBoolean()) { + this.test = new TestFetch(threadPool); + } else { + HashMap<ShardId, ShardAttributes> shardToCustomDataPath = new HashMap<>(); + ShardId shardId0 = new ShardId("index1", "index_uuid1", 0); + ShardId shardId1 = new ShardId("index2", "index_uuid2", 0); + shardToCustomDataPath.put(shardId0, new ShardAttributes(shardId0, "")); + shardToCustomDataPath.put(shardId1, new ShardAttributes(shardId1, "")); + this.test = new TestFetch(threadPool, shardToCustomDataPath); + } } @After @@ -97,7 +108,7 @@ public void testClose() throws Exception { test.addSimulation(node1.getId(), response1); // first fetch, no data, still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -107,7 +118,7 @@ public void testClose() throws Exception { assertThat(test.reroute.get(), equalTo(1)); test.close(); try { - test.fetchData(nodes, emptySet()); + test.fetchData(nodes, emptyMap()); fail("fetch data should fail when closed"); } catch (IllegalStateException e) { // all is well @@ -119,7 +130,7 @@ public void testFullCircleSingleNodeSuccess() throws Exception { test.addSimulation(node1.getId(), response1); // first fetch, no data, still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -127,7 +138,7 @@ public void testFullCircleSingleNodeSuccess() throws Exception { test.fireSimulationAndWait(node1.getId()); // verify we get back the data node assertThat(test.reroute.get(), equalTo(1)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); @@ -139,7 +150,7 @@ public void testFullCircleSingleNodeFailure() throws Exception { test.addSimulation(node1.getId(), failure1); // first fetch, no data, still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -147,19 +158,19 @@ public void testFullCircleSingleNodeFailure() throws Exception { test.fireSimulationAndWait(node1.getId()); // failure, fetched data exists, but has no data assertThat(test.reroute.get(), equalTo(1)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = 
test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(0)); // on failure, we reset the failure on a successive call to fetchData, and try again afterwards test.addSimulation(node1.getId(), response1); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); test.fireSimulationAndWait(node1.getId()); // 2 reroutes, cause we have a failure that we clear assertThat(test.reroute.get(), equalTo(3)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); @@ -170,7 +181,7 @@ public void testIgnoreResponseFromDifferentRound() throws Exception { test.addSimulation(node1.getId(), response1); // first fetch, no data, still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -183,7 +194,7 @@ public void testIgnoreResponseFromDifferentRound() throws Exception { test.fireSimulationAndWait(node1.getId()); // verify we get back the data node assertThat(test.reroute.get(), equalTo(2)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); @@ -195,7 +206,7 @@ public void testIgnoreFailureFromDifferentRound() throws Exception { test.addSimulation(node1.getId(), failure1); // first fetch, no data, still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -212,7 +223,7 @@ public void testIgnoreFailureFromDifferentRound() throws Exception { test.fireSimulationAndWait(node1.getId()); // failure, fetched data exists, but has no data assertThat(test.reroute.get(), equalTo(2)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(0)); } @@ -223,7 +234,7 @@ public void testTwoNodesOnSetup() throws Exception { test.addSimulation(node2.getId(), response2); // no fetched data, 2 requests still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -231,14 +242,14 @@ public void testTwoNodesOnSetup() throws Exception { test.fireSimulationAndWait(node1.getId()); // there is still another on going request, so no data assertThat(test.getNumberOfInFlightFetches(), equalTo(1)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); // fire the second simulation, this should allow us to get the data test.fireSimulationAndWait(node2.getId()); // no more 
ongoing requests, we should fetch the data assertThat(test.reroute.get(), equalTo(2)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(2)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); @@ -251,21 +262,21 @@ public void testTwoNodesOnSetupAndFailure() throws Exception { test.addSimulation(node2.getId(), failure2); // no fetched data, 2 requests still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); // fire the first response, it should trigger a reroute test.fireSimulationAndWait(node1.getId()); assertThat(test.reroute.get(), equalTo(1)); - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); // fire the second simulation, this should allow us to get the data test.fireSimulationAndWait(node2.getId()); assertThat(test.reroute.get(), equalTo(2)); // since one of those failed, we should only have one entry - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); @@ -276,7 +287,7 @@ public void testTwoNodesAddedInBetween() throws Exception { test.addSimulation(node1.getId(), response1); // no fetched data, 2 requests still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -287,14 +298,14 @@ public void testTwoNodesAddedInBetween() throws Exception { nodes = DiscoveryNodes.builder(nodes).add(node2).build(); test.addSimulation(node2.getId(), response2); // no fetch data, has a new node introduced - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); // fire the second simulation, this should allow us to get the data test.fireSimulationAndWait(node2.getId()); // since one of those failed, we should only have one entry - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(2)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); @@ -309,7 +320,7 @@ public void testClearCache() throws Exception { test.clearCacheForNode(node1.getId()); // no fetched data, request still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -317,13 +328,13 @@ public void testClearCache() throws Exception { assertThat(test.reroute.get(), equalTo(1)); // verify we get back right data from node - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); 
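            // [Editor's aside] One case the new fetchData() validation implies but these
            // tests leave implicit: when TestFetch was built in single-shard mode (the
            // randomBoolean() branch in setUp() without a shardAttributesMap), an
            // ignore-nodes map keyed by any other ShardId is rejected. Sketch:
            //
            //     Map<ShardId, Set<String>> wrong = new HashMap<>();
            //     wrong.put(new ShardId("other", "other_uuid", 0), Set.of(node1.getId()));
            //     expectThrows(IllegalStateException.class, () -> test.fetchData(nodes, wrong));
            //
            // A map with more than one entry fails the same check ("at most one shard").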
assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); // second fetch gets same data - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1)); @@ -334,14 +345,14 @@ public void testClearCache() throws Exception { test.addSimulation(node1.getId(), response1_2); // no fetched data, new request on going - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); test.fireSimulationAndWait(node1.getId()); assertThat(test.reroute.get(), equalTo(2)); // verify we get new data back - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1_2)); @@ -352,7 +363,7 @@ public void testConcurrentRequestAndClearCache() throws Exception { test.addSimulation(node1.getId(), response1); // no fetched data, request still on going - AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptySet()); + AsyncShardFetch.FetchResult<Response> fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); assertThat(test.reroute.get(), equalTo(0)); @@ -366,14 +377,14 @@ public void testConcurrentRequestAndClearCache() throws Exception { test.addSimulation(node1.getId(), response1_2); // verify still no fetched data, request still on going - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(false)); test.fireSimulationAndWait(node1.getId()); assertThat(test.reroute.get(), equalTo(2)); // verify we get new data back - fetchData = test.fetchData(nodes, emptySet()); + fetchData = test.fetchData(nodes, emptyMap()); assertThat(fetchData.hasData(), equalTo(true)); assertThat(fetchData.getData().size(), equalTo(1)); assertThat(fetchData.getData().get(node1), sameInstance(response1_2)); @@ -403,6 +414,11 @@ static class Entry { this.threadPool = threadPool; } + TestFetch(ThreadPool threadPool, Map<ShardId, ShardAttributes> shardAttributesMap) { + super(LogManager.getLogger(TestFetch.class), "test", shardAttributesMap, null, "test-batch"); + this.threadPool = threadPool; + } + public void addSimulation(String nodeId, Response response) { simulations.put(nodeId, new Entry(response, null)); } @@ -418,7 +434,7 @@ public void fireSimulationAndWait(String nodeId) throws InterruptedException { } @Override - protected void reroute(ShardId shardId, String reason) { + protected void reroute(String shardId, String reason) { reroute.incrementAndGet(); } diff --git a/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java b/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java index c31ce60cb96a1..dceda6433575c 100644 --- a/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java +++ b/server/src/test/java/org/opensearch/gateway/PrimaryShardAllocatorTests.java @@ -857,7 +857,11 @@ protected AsyncShardFetch.FetchResult<TransportNodesListGatewayStartedShards.Nod ShardRouting shard, RoutingAllocation allocation ) { - return new AsyncShardFetch.FetchResult<>(shardId, data, 
Collections.<String>emptySet()); + return new AsyncShardFetch.FetchResult<>(data, new HashMap<>() { + { + put(shardId, Collections.<String>emptySet()); + } + }); } } } diff --git a/server/src/test/java/org/opensearch/gateway/ReplicaShardAllocatorTests.java b/server/src/test/java/org/opensearch/gateway/ReplicaShardAllocatorTests.java index 3eeebd8cab6e4..5833d9c4f187f 100644 --- a/server/src/test/java/org/opensearch/gateway/ReplicaShardAllocatorTests.java +++ b/server/src/test/java/org/opensearch/gateway/ReplicaShardAllocatorTests.java @@ -728,7 +728,11 @@ protected AsyncShardFetch.FetchResult<TransportNodesListShardStoreMetadata.NodeS ); } } - return new AsyncShardFetch.FetchResult<>(shardId, tData, Collections.emptySet()); + return new AsyncShardFetch.FetchResult<>(tData, new HashMap<>() { + { + put(shardId, Collections.emptySet()); + } + }); } @Override diff --git a/server/src/test/java/org/opensearch/indices/store/ShardAttributesTests.java b/server/src/test/java/org/opensearch/indices/store/ShardAttributesTests.java new file mode 100644 index 0000000000000..7fa95fefe72fd --- /dev/null +++ b/server/src/test/java/org/opensearch/indices/store/ShardAttributesTests.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.indices.store; + +import org.opensearch.core.common.io.stream.DataOutputStreamOutput; +import org.opensearch.core.common.io.stream.InputStreamStreamInput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataOutputStream; +import java.io.IOException; + +public class ShardAttributesTests extends OpenSearchTestCase { + + Index index = new Index("index", "test-uid"); + ShardId shardId = new ShardId(index, 0); + String customDataPath = "/path/to/data"; + + public void testShardAttributesConstructor() { + ShardAttributes attributes = new ShardAttributes(shardId, customDataPath); + assertEquals(attributes.getShardId(), shardId); + assertEquals(attributes.getCustomDataPath(), customDataPath); + } + + public void testSerialization() throws IOException { + ShardAttributes attributes1 = new ShardAttributes(shardId, customDataPath); + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + StreamOutput output = new DataOutputStreamOutput(new DataOutputStream(bytes)); + attributes1.writeTo(output); + output.close(); + StreamInput input = new InputStreamStreamInput(new ByteArrayInputStream(bytes.toByteArray())); + ShardAttributes attributes2 = new ShardAttributes(input); + input.close(); + assertEquals(attributes1.getShardId(), attributes2.getShardId()); + assertEquals(attributes1.getCustomDataPath(), attributes2.getCustomDataPath()); + } + +} diff --git a/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java b/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java index 7462062a0cd46..f123b926f5bad 100644 --- a/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java +++ b/test/framework/src/main/java/org/opensearch/test/gateway/TestGatewayAllocator.java @@ -98,7 +98,11 @@ protected 
AsyncShardFetch.FetchResult<NodeGatewayStartedShards> fetchData(ShardR ) ); - return new AsyncShardFetch.FetchResult<>(shardId, foundShards, ignoreNodes); + return new AsyncShardFetch.FetchResult<>(foundShards, new HashMap<>() { + { + put(shardId, ignoreNodes); + } + }); } }; @@ -111,7 +115,11 @@ private ReplicationCheckpoint getReplicationCheckpoint(ShardId shardId, String n protected AsyncShardFetch.FetchResult<NodeStoreFilesMetadata> fetchData(ShardRouting shard, RoutingAllocation allocation) { // for now, just pretend no node has data final ShardId shardId = shard.shardId(); - return new AsyncShardFetch.FetchResult<>(shardId, Collections.emptyMap(), allocation.getIgnoreNodes(shardId)); + return new AsyncShardFetch.FetchResult<>(Collections.emptyMap(), new HashMap<>() { + { + put(shardId, allocation.getIgnoreNodes(shardId)); + } + }); } @Override From c52d4a351c0b530180d03ea3f2fcb914d6dc1776 Mon Sep 17 00:00:00 2001 From: Craig Perkins <cwperx@amazon.com> Date: Tue, 2 Jan 2024 18:15:45 -0500 Subject: [PATCH 043/178] Fix issue when calling Delete PIT endpoint and no PITs exist (#11711) Signed-off-by: Craig Perkins <cwperx@amazon.com> --- CHANGELOG.md | 1 + .../opensearch/action/search/PitService.java | 1 + .../search/TransportDeletePitActionTests.java | 42 +++++++++++++++++++ 3 files changed, 44 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 579d2695d3125..a167127a7d795 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -204,6 +204,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) - Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) +- Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711)) ### Security diff --git a/server/src/main/java/org/opensearch/action/search/PitService.java b/server/src/main/java/org/opensearch/action/search/PitService.java index ed12938883e48..b6480ce63f827 100644 --- a/server/src/main/java/org/opensearch/action/search/PitService.java +++ b/server/src/main/java/org/opensearch/action/search/PitService.java @@ -71,6 +71,7 @@ public void deletePitContexts( ) { if (nodeToContextsMap.size() == 0) { listener.onResponse(new DeletePitResponse(Collections.emptyList())); + return; } final Set<String> clusters = nodeToContextsMap.values() .stream() diff --git a/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java index 8d3cdc070c695..a1e3a2b03caf7 100644 --- a/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/TransportDeletePitActionTests.java @@ -21,6 +21,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.tasks.TaskId; import org.opensearch.index.query.IdsQueryBuilder; import org.opensearch.index.query.MatchAllQueryBuilder; @@ -33,6 +34,7 @@ import org.opensearch.threadpool.ThreadPool; import 
org.opensearch.transport.RemoteClusterConnectionTests; import org.opensearch.transport.Transport; +import org.opensearch.transport.TransportService; import org.junit.Before; import java.util.ArrayList; @@ -262,6 +264,46 @@ public void getAllPits(ActionListener<GetAllPitNodesResponse> getAllPitsListener } } + public void testDeleteAllPITSuccessWhenNoPITsExist() throws InterruptedException, ExecutionException { + ActionFilters actionFilters = mock(ActionFilters.class); + when(actionFilters.filters()).thenReturn(new ActionFilter[0]); + List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService cluster1Transport = startTransport("cluster_1_node", knownNodes, Version.CURRENT)) { + knownNodes.add(cluster1Transport.getLocalDiscoNode()); + TransportService mockTransportService = mock(TransportService.class); + PitService pitService = new PitService(clusterServiceMock, mock(SearchTransportService.class), mockTransportService, client) { + @Override + public void getAllPits(ActionListener<GetAllPitNodesResponse> getAllPitsListener) { + List<ListPitInfo> list = new ArrayList<>(); + GetAllPitNodeResponse getAllPitNodeResponse = new GetAllPitNodeResponse(cluster1Transport.getLocalDiscoNode(), list); + List<GetAllPitNodeResponse> nodeList = new ArrayList(); + nodeList.add(getAllPitNodeResponse); + getAllPitsListener.onResponse(new GetAllPitNodesResponse(new ClusterName("cn"), nodeList, new ArrayList())); + } + }; + TransportDeletePitAction action = new TransportDeletePitAction( + mockTransportService, + actionFilters, + namedWriteableRegistry, + pitService + ); + DeletePitRequest deletePITRequest = new DeletePitRequest("_all"); + ActionListener<DeletePitResponse> listener = new ActionListener<DeletePitResponse>() { + @Override + public void onResponse(DeletePitResponse deletePitResponse) { + assertEquals(RestStatus.OK, deletePitResponse.status()); + assertEquals(0, deletePitResponse.getDeletePitResults().size()); + } + + @Override + public void onFailure(Exception e) { + fail("Should not receive Exception"); + } + }; + action.execute(task, deletePITRequest, listener); + } + } + public void testDeletePitWhenNodeIsDown() throws InterruptedException, ExecutionException { List<DiscoveryNode> deleteNodesInvoked = new CopyOnWriteArrayList<>(); ActionFilters actionFilters = mock(ActionFilters.class); From 7b1c2c78c16d566da93001d0f6a8224b2de9e8d0 Mon Sep 17 00:00:00 2001 From: Rishabh Maurya <rishabhmaurya05@gmail.com> Date: Tue, 2 Jan 2024 16:04:40 -0800 Subject: [PATCH 044/178] Implementation for match_only_text field (#11039) * Implementation for match_only_text field Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Fix build failures Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Fix bugs Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Added mapper tests, stil failing on prefix and phrase tests Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Disable index prefix and phrase mapper Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Added unit tests for phrase and multiphrase query validation Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Add unit tests for prefix and prefix phrase queries Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Add a test to cover 3 word with synonym match phrase prefix query Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Add unit test for SourceFieldMatchQuery Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Added test for _source disabled case 
Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Add unit test for missing field Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * more validation tests and changelog update Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Added integration tests for match_only_text replicating text field integ tests Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Added skip section in integ test to fix mixed cluster failures Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * remove unused import Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Address PR comments Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * fix integ tests Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Fix flaky test due to random indexwriter Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * pr comment: header modification Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * Address PR comments Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * addded change to the right section of CHANGELOG Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * overriding the textFieldType before every test Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * rename @Before method Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * update changelog description Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> --------- Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> --- CHANGELOG.md | 1 + .../11_match_field_match_only_text.yml | 70 +++ .../20_ngram_search_field_match_only_text.yml | 144 ++++++ ...ram_highligthing_field_match_only_text.yml | 137 ++++++ .../40_query_string_field_match_only_text.yml | 59 +++ ...default_analyzer_field_match_only_text.yml | 42 ++ ...es_with_synonyms_field_match_only_text.yml | 348 ++++++++++++++ ...60_synonym_graph_field_match_only_text.yml | 209 ++++++++ .../70_intervals_field_match_only_text.yml | 67 +++ .../20_phrase_field_match_only_text.yml | 238 +++++++++ .../20_highlighting_field_match_only_text.yml | 201 ++++++++ .../20_query_string_field_match_only_text.yml | 53 +++ .../30_sig_terms_field_match_only_text.yml | 76 +++ .../90_sig_text_field_match_only_text.yml | 155 ++++++ .../20_highlighting_field_match_only_text.yml | 137 ++++++ .../160_exists_query_match_only_text.yml | 119 +++++ ...00_phrase_search_field_match_only_text.yml | 67 +++ ...atch_bool_prefix_field_match_only_text.yml | 282 +++++++++++ ...disallow_queries_field_match_only_text.yml | 141 ++++++ .../10_basic_field_match_only_field.yml | 92 ++++ .../index/mapper/MappedFieldType.java | 13 + .../mapper/MatchOnlyTextFieldMapper.java | 312 ++++++++++++ .../index/mapper/TextFieldMapper.java | 35 +- .../index/query/SourceFieldMatchQuery.java | 160 +++++++ .../opensearch/index/search/MatchQuery.java | 10 +- .../index/search/MultiMatchQuery.java | 4 +- .../org/opensearch/indices/IndicesModule.java | 2 + .../MatchOnlyTextFieldAnalyzerModeTests.java | 16 + .../mapper/MatchOnlyTextFieldMapperTests.java | 450 ++++++++++++++++++ .../mapper/MatchOnlyTextFieldTypeTests.java | 31 ++ .../mapper/TextFieldAnalyzerModeTests.java | 22 +- .../index/mapper/TextFieldMapperTests.java | 222 ++++----- .../index/mapper/TextFieldTypeTests.java | 32 +- .../query/SourceFieldMatchQueryTests.java | 173 +++++++ .../index/mapper/MapperServiceTestCase.java | 4 +- .../aggregations/AggregatorTestCase.java | 4 +- 36 files changed, 3959 insertions(+), 169 deletions(-) create mode 100644 
modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml create mode 100644 modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml create mode 100644 modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml create mode 100644 server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java create mode 100644 server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java create mode 100644 server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldAnalyzerModeTests.java create mode 100644 server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java create mode 100644 server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java create mode 100644 server/src/test/java/org/opensearch/index/query/SourceFieldMatchQueryTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index a167127a7d795..026606ff57d65 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -119,6 +119,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Create separate transport action for render search template action ([#11170](https://github.com/opensearch-project/OpenSearch/pull/11170)) - Add 
additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) - Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) +- Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml new file mode 100644 index 0000000000000..40ff2c2f4cdbe --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml @@ -0,0 +1,70 @@ +# integration tests for queries with specific analysis chains + +"match query with stacked stems": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + # Tests the match query stemmed tokens are "stacked" on top of the unstemmed + # versions in the same position. + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + analysis: + analyzer: + index: + tokenizer: standard + filter: [lowercase] + search: + rest_total_hits_as_int: true + tokenizer: standard + filter: [lowercase, keyword_repeat, porter_stem, unique_stem] + filter: + unique_stem: + type: unique + only_on_same_position: true + mappings: + properties: + text: + type: match_only_text + analyzer: index + search_analyzer: search + + - do: + index: + index: test + id: 1 + body: { "text": "the fox runs across the street" } + refresh: true + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: fox runs + operator: AND + - match: {hits.total: 1} + + - do: + index: + index: test + id: 2 + body: { "text": "run fox run" } + refresh: true + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: fox runs + operator: AND + - match: {hits.total: 2} diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml new file mode 100644 index 0000000000000..95b648dee47c8 --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml @@ -0,0 +1,144 @@ +"ngram search": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + analysis: + analyzer: + my_analyzer: + tokenizer: standard + filter: [my_ngram] + filter: + my_ngram: + type: ngram + min: 2, + max: 2 + mappings: + properties: + text: + type: match_only_text + analyzer: my_analyzer + + - do: + index: + index: test + id: 1 + body: { "text": "foo bar baz" } + refresh: true + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: foa + - match: {hits.total: 1} + +--- 
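# [Editor's aside -- annotation only, not part of the generated test file.]
# Every suite in this patch exercises the same minimal mapping; index and
# field names are arbitrary:
#
#   mappings:
#     properties:
#       message:
#         type: match_only_text
#
# As the tests above show, the new type accepts the analyzer options of
# `text`, but per the commit message it trades positional structures for
# storage: phrase and prefix queries are re-verified against _source (see
# SourceFieldMatchQuery in the file list) rather than answered from index
# positions.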
+"testNGramCopyField": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + max_ngram_diff: 9 + analysis: + analyzer: + my_ngram_analyzer: + tokenizer: my_ngram_tokenizer + tokenizer: + my_ngram_tokenizer: + type: ngram + min: 1, + max: 10 + token_chars: [] + mappings: + properties: + origin: + type: match_only_text + copy_to: meta + meta: + type: match_only_text + analyzer: my_ngram_analyzer + + - do: + index: + index: test + id: 1 + body: { "origin": "C.A1234.5678" } + refresh: true + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + meta: + query: 1234 + - match: {hits.total: 1} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + meta: + query: 1234.56 + - match: {hits.total: 1} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + meta: + query: A1234 + - match: {hits.total: 1} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + term: + meta: + value: a1234 + - match: {hits.total: 0} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + meta: + query: A1234 + analyzer: my_ngram_analyzer + - match: {hits.total: 1} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + meta: + query: a1234 + analyzer: my_ngram_analyzer + - match: {hits.total: 1} diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml new file mode 100644 index 0000000000000..597f55679a2c6 --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml @@ -0,0 +1,137 @@ +"ngram highlighting": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + index.max_ngram_diff: 19 + analysis: + tokenizer: + my_ngramt: + type: ngram + min_gram: 1 + max_gram: 20 + token_chars: letter,digit + filter: + my_ngram: + type: ngram + min_gram: 1 + max_gram: 20 + analyzer: + name2_index_analyzer: + tokenizer: whitespace + filter: [my_ngram] + name_index_analyzer: + tokenizer: my_ngramt + name_search_analyzer: + tokenizer: whitespace + mappings: + properties: + name: + type: match_only_text + term_vector: with_positions_offsets + analyzer: name_index_analyzer + search_analyzer: name_search_analyzer + name2: + type: match_only_text + term_vector: with_positions_offsets + analyzer: name2_index_analyzer + search_analyzer: name_search_analyzer + + - do: + index: + index: test + id: 1 + refresh: true + body: + name: logicacmg ehemals avinci - the know how company + name2: logicacmg ehemals avinci - the know how company + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + name: + query: logica m + highlight: + fields: + - name: {} + - match: {hits.total: 1} + - match: {hits.hits.0.highlight.name.0: "<em>logica</em>c<em>m</em>g ehe<em>m</em>als avinci - the know how co<em>m</em>pany"} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + name: + query: logica ma + highlight: + fields: + - name: {} + - match: {hits.total: 1} + - match: {hits.hits.0.highlight.name.0: "<em>logica</em>cmg 
ehe<em>ma</em>ls avinci - the know how company"} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + name: + query: logica + highlight: + fields: + - name: {} + - match: {hits.total: 1} + - match: {hits.hits.0.highlight.name.0: "<em>logica</em>cmg ehemals avinci - the know how company"} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + name2: + query: logica m + highlight: + fields: + - name2: {} + - match: {hits.total: 1} + - match: {hits.hits.0.highlight.name2.0: "<em>logicacmg</em> <em>ehemals</em> avinci - the know how <em>company</em>"} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + name2: + query: logica ma + highlight: + fields: + - name2: {} + - match: {hits.total: 1} + - match: {hits.hits.0.highlight.name2.0: "<em>logicacmg</em> <em>ehemals</em> avinci - the know how company"} + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + name2: + query: logica + highlight: + fields: + - name2: {} + - match: {hits.total: 1} + - match: {hits.hits.0.highlight.name2.0: "<em>logicacmg</em> ehemals avinci - the know how company"} diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml new file mode 100644 index 0000000000000..ddebb1d76acbc --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml @@ -0,0 +1,59 @@ +--- +"Test query string with snowball": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + mappings: + properties: + field: + type: match_only_text + number: + type: integer + + - do: + index: + index: test + id: 1 + body: { field: foo bar} + + - do: + indices.refresh: + index: [test] + + - do: + indices.validate_query: + index: test + q: field:bars + analyzer: snowball + + - is_true: valid + + - do: + search: + rest_total_hits_as_int: true + index: test + q: field:bars + analyzer: snowball + + - match: {hits.total: 1} + + - do: + explain: + index: test + id: 1 + q: field:bars + analyzer: snowball + + - is_true: matched + + - do: + count: + index: test + q: field:bars + analyzer: snowball + + - match: {count : 1} diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml new file mode 100644 index 0000000000000..97f3fb65e94a2 --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml @@ -0,0 +1,42 @@ +--- +"Test default search analyzer is applied": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + settings: + index.analysis.analyzer.default.type: simple + index.analysis.analyzer.default_search.type: german + mappings: + properties: + body: + type: match_only_text + + - do: + index: + index: test + id: 1 + body: + body: Ich lese die BΓΌcher + + - do: + indices.refresh: + index: [ test ] + + - do: + search: + index: test + q: "body:BΓΌcher" + + - match: { hits.total.value: 0 } + + 
- do: + search: + index: test + q: "body:BΓΌcher" + analyzer: simple + + - match: { hits.total.value: 1 } diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml new file mode 100644 index 0000000000000..0c263a47a38e6 --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml @@ -0,0 +1,348 @@ +--- +"Test common terms query with stacked tokens": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + features: "allowed_warnings" + + - do: + indices.create: + index: test + body: + settings: + analysis: + filter: + syns: + type: synonym + synonyms: [ "quick,fast" ] + analyzer: + syns: + tokenizer: standard + filter: [ "syns" ] + mappings: + properties: + field1: + type: match_only_text + analyzer: syns + field2: + type: match_only_text + analyzer: syns + + - do: + index: + index: test + id: 3 + body: + field1: quick lazy huge brown pidgin + field2: the quick lazy huge brown fox jumps over the tree + + - do: + index: + index: test + id: 1 + body: + field1: the quick brown fox + + - do: + index: + index: test + id: 2 + body: + field1: the quick lazy huge brown fox jumps over the tree + refresh: true + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the fast brown + cutoff_frequency: 3 + low_freq_operator: or + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the fast brown + cutoff_frequency: 3 + low_freq_operator: and + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the fast brown + cutoff_frequency: 3 + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the fast huge fox + minimum_should_match: + low_freq: 3 + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "2" } + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the fast lazy fox brown + cutoff_frequency: 1 + minimum_should_match: + 
high_freq: 5 + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "1" } + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the fast lazy fox brown + cutoff_frequency: 1 + minimum_should_match: + high_freq: 6 + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "2" } + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the fast lazy fox brown + cutoff_frequency: 1 + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "2" } + + - do: + allowed_warnings: + - 'Deprecated field [common] used, replaced by [[match] query which can efficiently skip blocks of documents if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + common: + field1: + query: the quick brown + cutoff_frequency: 3 + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + allowed_warnings: + - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + match: + field1: + query: the fast brown + cutoff_frequency: 3 + operator: and + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + + - do: + allowed_warnings: + - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + match: + field1: + query: the fast brown + cutoff_frequency: 3 + operator: or + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.2._id: "3" } + + - do: + allowed_warnings: + - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [match] query can skip block of documents efficiently if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + match: + field1: + query: the fast brown + cutoff_frequency: 3 + minimum_should_match: 3 + - match: { hits.total: 2 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.1._id: "2" } + + - do: + allowed_warnings: + - 'Deprecated field [cutoff_frequency] used, replaced by [you can omit this option, the [multi_match] query can skip block of documents efficiently if the total number of hits is not tracked]' + search: + rest_total_hits_as_int: true + body: + query: + multi_match: + query: the fast brown + fields: [ "field1", "field2" ] + cutoff_frequency: 3 + operator: and + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "1" } + - match: { hits.hits.2._id: "2" } + +--- +"Test match query with synonyms - see #3881 for extensive description of the issue": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + settings: + 
analysis: + filter: + synonym: + type: synonym + synonyms: [ "quick,fast" ] + analyzer: + index: + type: custom + tokenizer: standard + filter: lowercase + search: + type: custom + tokenizer: standard + filter: [ lowercase, synonym ] + mappings: + properties: + text: + type: match_only_text + analyzer: index + search_analyzer: search + + - do: + index: + index: test + id: 1 + body: + text: quick brown fox + refresh: true + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: quick + operator: and + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: quick brown + operator: and + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: fast + operator: and + - match: { hits.total: 1 } + + - do: + index: + index: test + id: 2 + body: + text: fast brown fox + refresh: true + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: quick + operator: and + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: quick brown + operator: and + - match: { hits.total: 2 } diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml new file mode 100644 index 0000000000000..91a8b1509517e --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml @@ -0,0 +1,209 @@ +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + settings: + index: + number_of_shards: 1 # keep scoring stable + analysis: + filter: + syns: + type: synonym + synonyms: [ "wtf, what the fudge", "foo, bar baz" ] + graph_syns: + type: synonym_graph + synonyms: [ "wtf, what the fudge", "foo, bar baz" ] + analyzer: + lower_syns: + type: custom + tokenizer: standard + filter: [ lowercase, syns ] + lower_graph_syns: + type: custom + tokenizer: standard + filter: [ lowercase, graph_syns ] + mappings: + properties: + field: + type: match_only_text + + - do: + index: + index: test + id: 1 + body: + text: say wtf happened foo + - do: + index: + index: test + id: 2 + body: + text: bar baz what the fudge man + + - do: + index: + index: test + id: 3 + body: + text: wtf + + - do: + index: + index: test + id: 4 + body: + text: what is the name for fudge + + - do: + index: + index: test + id: 5 + body: + text: bar two three + + - do: + index: + index: test + id: 6 + body: + text: bar baz two three + refresh: true + +--- +"simple multiterm phrase": + - do: + search: + rest_total_hits_as_int: true + body: + query: + match_phrase: + text: + query: foo two three + analyzer: lower_syns + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "5" } # incorrect match because we're not using graph synonyms + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match_phrase: + text: + query: foo two three + analyzer: lower_graph_syns + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "6" } # correct match because we're using graph synonyms + +--- +"simple multiterm and": + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: say what
the fudge + analyzer: lower_syns + operator: and + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "1" } # non-graph synonyms coincidentally give us the correct answer here + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: say what the fudge + analyzer: lower_graph_syns + operator: and + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "1" } + +--- +"minimum should match": + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: three what the fudge foo + operator: or + analyzer: lower_graph_syns + auto_generate_synonyms_phrase_query: false + - match: { hits.total: 6 } + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: three what the fudge foo + operator: or + analyzer: lower_graph_syns + minimum_should_match: 80% + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "6" } + - match: { hits.hits.2._id: "1" } + +--- +"multiterm synonyms phrase": + - do: + search: + rest_total_hits_as_int: true + body: + query: + match: + text: + query: wtf + operator: and + analyzer: lower_graph_syns + - match: { hits.total: 3 } + - match: { hits.hits.0._id: "2" } + - match: { hits.hits.1._id: "3" } + - match: { hits.hits.2._id: "1" } + +--- +"phrase prefix": + - do: + index: + index: test + id: 7 + body: + text: "WTFD!" + + - do: + index: + index: test + id: 8 + body: + text: "Weird Al's WHAT THE FUDGESICLE" + refresh: true + + - do: + search: + rest_total_hits_as_int: true + body: + query: + match_phrase_prefix: + text: + query: wtf + analyzer: lower_graph_syns + - match: { hits.total: 5 } + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.1._id: "7" } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.3._id: "8" } + - match: { hits.hits.4._id: "2" } diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml new file mode 100644 index 0000000000000..9792c9d2695ea --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml @@ -0,0 +1,67 @@ +# integration tests for intervals queries using analyzers +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + mappings: + properties: + text: + type: match_only_text + analyzer: standard + text_en: + type: match_only_text + analyzer: english + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test", "_id": "4"}}' + - '{"text" : "Outside it is cold and wet and raining cats and dogs", + "text_en" : "Outside it is cold and wet and raining cats and dogs"}' + +--- +"Test use_field": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + catch: bad_request + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cats + - match: + query: dog + max_gaps: 1 + - match: { status: 400 } + - match: { error.type: "search_phase_execution_exception"} + - match: { error.reason: "all shards failed"} + - do: + catch: bad_request + search: + index: test + body: + query: + intervals: + text: + all_of: + intervals: + - match: + query: cats + - match: + query: dog + use_field: text_en + max_gaps: 1 + - match: { status: 400 } + - match: { 
error.type: "search_phase_execution_exception"} + - match: { error.reason: "all shards failed"} diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml new file mode 100644 index 0000000000000..aff2b3f11101c --- /dev/null +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml @@ -0,0 +1,238 @@ +# Integration tests for the phrase suggester with a few analyzers + +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + analysis: + analyzer: + body: + tokenizer: standard + filter: [lowercase] + bigram: + tokenizer: standard + filter: [lowercase, bigram] + ngram: + tokenizer: standard + filter: [lowercase, ngram] + reverse: + tokenizer: standard + filter: [lowercase, reverse] + filter: + bigram: + type: shingle + output_unigrams: false + min_shingle_size: 2 + max_shingle_size: 2 + ngram: + type: shingle + output_unigrams: true + min_shingle_size: 2 + max_shingle_size: 2 + mappings: + properties: + body: + type: match_only_text + analyzer: body + fields: + bigram: + type: match_only_text + analyzer: bigram + ngram: + type: match_only_text + analyzer: ngram + reverse: + type: match_only_text + analyzer: reverse + + - do: + bulk: + index: test + refresh: true + body: | + { "index": {} } + { "body": "Xorr the God-Jewel" } + { "index": {} } + { "body": "Xorn" } + { "index": {} } + { "body": "Arthur, King of the Britons" } + { "index": {} } + { "body": "Sir Lancelot the Brave" } + { "index": {} } + { "body": "Patsy, Arthur's Servant" } + { "index": {} } + { "body": "Sir Robin the Not-Quite-So-Brave-as-Sir-Lancelot" } + { "index": {} } + { "body": "Sir Bedevere the Wise" } + { "index": {} } + { "body": "Sir Galahad the Pure" } + { "index": {} } + { "body": "Miss Islington, the Witch" } + { "index": {} } + { "body": "Zoot" } + { "index": {} } + { "body": "Leader of Robin's Minstrels" } + { "index": {} } + { "body": "Old Crone" } + { "index": {} } + { "body": "Frank, the Historian" } + { "index": {} } + { "body": "Frank's Wife" } + { "index": {} } + { "body": "Dr. Piglet" } + { "index": {} } + { "body": "Dr. 
Winston" } + { "index": {} } + { "body": "Sir Robin (Stand-in)" } + { "index": {} } + { "body": "Knight Who Says Ni" } + { "index": {} } + { "body": "Police sergeant who stops the film" } + +--- +"sorts by score": + - do: + search: + rest_total_hits_as_int: true + size: 0 + index: test + body: + suggest: + text: xor the got-jewel + test: + phrase: + field: body.ngram + force_unigrams: true + max_errors: 0.5 + direct_generator: + - field: body.ngram + min_word_length: 1 + suggest_mode: always + + - match: {suggest.test.0.options.0.text: xorr the god jewel} + - match: {suggest.test.0.options.1.text: xorn the god jewel} + +--- +"breaks ties by sorting terms": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + # This runs the suggester without bigrams so we can be sure of the sort order + - do: + search: + rest_total_hits_as_int: true + size: 0 + index: test + body: + suggest: + text: xor the got-jewel + test: + phrase: + field: body + analyzer: body + force_unigrams: true + max_errors: 0.5 + direct_generator: + - field: body + min_word_length: 1 + suggest_mode: always + + # The scores are identical but xorn comes first because it sorts first + - match: {suggest.test.0.options.0.text: xorn the god jewel} + - match: {suggest.test.0.options.1.text: xorr the god jewel} + - match: {suggest.test.0.options.0.score: $body.suggest.test.0.options.0.score} + +--- +"fails when asked to run on a field without unigrams": + - do: + catch: /since it doesn't emit unigrams/ + search: + rest_total_hits_as_int: true + size: 0 + index: test + body: + suggest: + text: xor the got-jewel + test: + phrase: + field: body.bigram + + - do: + catch: /since it doesn't emit unigrams/ + search: + rest_total_hits_as_int: true + size: 0 + index: test + body: + suggest: + text: xor the got-jewel + test: + phrase: + field: body.bigram + analyzer: bigram + +--- +"doesn't fail when asked to run on a field without unigrams when force_unigrams=false": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + search: + rest_total_hits_as_int: true + size: 0 + index: test + body: + suggest: + text: xor the got-jewel + test: + phrase: + field: body.bigram + force_unigrams: false + + - do: + search: + rest_total_hits_as_int: true + size: 0 + index: test + body: + suggest: + text: xor the got-jewel + test: + phrase: + field: body.bigram + analyzer: bigram + force_unigrams: false + +--- +"reverse suggestions": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + search: + rest_total_hits_as_int: true + size: 0 + index: test + body: + suggest: + text: Artur, Ging of the Britons + test: + phrase: + field: body.ngram + force_unigrams: true + max_errors: 0.5 + direct_generator: + - field: body.reverse + min_word_length: 1 + suggest_mode: always + pre_filter: reverse + post_filter: reverse + + - match: {suggest.test.0.options.0.text: arthur king of the britons} diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml new file mode 100644 index 0000000000000..3cb8e09c70aed --- /dev/null +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml @@ -0,0 +1,201 @@ +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + + - do: + 
indices.create: + index: test + body: + settings: + number_of_replicas: 0 + mappings: + properties: + a_field: + type: search_as_you_type + analyzer: simple + max_shingle_size: 4 + text_field: + type: match_only_text + analyzer: simple + + - do: + index: + index: test + id: 1 + body: + a_field: "quick brown fox jump lazy dog" + text_field: "quick brown fox jump lazy dog" + + - do: + indices.refresh: {} + +--- +"phrase query": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_phrase: + a_field: "brown" + highlight: + fields: + a_field: + type: unified + + - match: { hits.total: 1 } + - match: { hits.hits.0._source.a_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0._source.text_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0.highlight.a_field.0: "quick <em>brown</em> fox jump lazy dog" } + +--- +"bool prefix query": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_bool_prefix: + a_field: "brown fo" + highlight: + fields: + a_field: + type: unified + + - match: { hits.total: 1 } + - match: { hits.hits.0._source.a_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0._source.text_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0.highlight.a_field.0: "quick <em>brown</em> fox jump lazy dog" } + +--- +"multi match bool prefix query 1 complete term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fo" + type: "bool_prefix" + fields: [ "a_field", "a_field._2gram", "a_field._3gram", "a_field._4gram" ] + highlight: + fields: + a_field: + type: unified + a_field._2gram: + type: unified + a_field._3gram: + type: unified + a_field._4gram: + type: unified + + - match: { hits.total: 1 } + - match: { hits.hits.0._source.a_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0._source.text_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0.highlight.a_field: ["quick <em>brown</em> fox jump lazy dog"] } + - match: { hits.hits.0.highlight.a_field\._2gram: null } + - match: { hits.hits.0.highlight.a_field\._3gram: null } + - match: { hits.hits.0.highlight.a_field\._4gram: null } + +--- +"multi match bool prefix query 2 complete term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox ju" + type: "bool_prefix" + fields: [ "a_field", "a_field._2gram", "a_field._3gram", "a_field._4gram" ] + highlight: + fields: + a_field: + type: unified + a_field._2gram: + type: unified + a_field._3gram: + type: unified + a_field._4gram: + type: unified + + - match: { hits.total: 1 } + - match: { hits.hits.0._source.a_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0._source.text_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0.highlight.a_field: ["quick <em>brown</em> <em>fox</em> jump lazy dog"] } + - match: { hits.hits.0.highlight.a_field\._2gram: ["quick <em>brown fox</em> jump lazy dog"] } + - match: { hits.hits.0.highlight.a_field\._3gram: null } + - match: { hits.hits.0.highlight.a_field\._4gram: null } + +--- +"multi match bool prefix query 3 complete term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox jump la" + type: "bool_prefix" + fields: [ "a_field", "a_field._2gram", "a_field._3gram", "a_field._4gram" ] + highlight: + fields: + a_field: + type: unified + a_field._2gram: + type: unified + a_field._3gram: + type: 
unified + a_field._4gram: + type: unified + + - match: { hits.total: 1 } + - match: { hits.hits.0._source.a_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0._source.text_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0.highlight.a_field: ["quick <em>brown</em> <em>fox</em> <em>jump</em> lazy dog"] } + - match: { hits.hits.0.highlight.a_field\._2gram: ["quick <em>brown fox jump</em> lazy dog"] } + - match: { hits.hits.0.highlight.a_field\._3gram: ["quick <em>brown fox jump</em> lazy dog"] } + - match: { hits.hits.0.highlight.a_field\._4gram: null } + +--- +"multi match bool prefix query 4 complete term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox jump lazy d" + type: "bool_prefix" + fields: [ "a_field", "a_field._2gram", "a_field._3gram", "a_field._4gram" ] + highlight: + fields: + a_field: + type: unified + a_field._2gram: + type: unified + a_field._3gram: + type: unified + a_field._4gram: + type: unified + + - match: { hits.total: 1 } + - match: { hits.hits.0._source.a_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0._source.text_field: "quick brown fox jump lazy dog" } + - match: { hits.hits.0.highlight.a_field: ["quick <em>brown</em> <em>fox</em> <em>jump</em> <em>lazy</em> dog"] } + - match: { hits.hits.0.highlight.a_field\._2gram: ["quick <em>brown fox jump lazy</em> dog"] } + - match: { hits.hits.0.highlight.a_field\._3gram: ["quick <em>brown fox jump lazy</em> dog"] } + - match: { hits.hits.0.highlight.a_field\._4gram: ["quick <em>brown fox jump lazy</em> dog"] } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml new file mode 100644 index 0000000000000..085c5633ac72b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml @@ -0,0 +1,53 @@ +--- +"validate_query with query_string parameters": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + mappings: + properties: + field: + type: match_only_text + number: + type: integer + + - do: + indices.validate_query: + index: test + q: bar + df: field + + - is_true: valid + + - do: + indices.validate_query: + index: test + q: field:foo field:xyz + + - is_true: valid + + - do: + indices.validate_query: + index: test + q: field:foo field:xyz + default_operator: AND + + - is_true: valid + + - do: + indices.validate_query: + index: test + q: field:BA* + + - is_true: valid + + - do: + indices.validate_query: + index: test + q: number:foo + lenient: true + + - is_true: valid diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml new file mode 100644 index 0000000000000..7a96536a2e261 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml @@ -0,0 +1,76 @@ +--- +"Default index": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: goodbad + body: + settings: + number_of_shards: "1" + mappings: + properties: + text: + type: match_only_text + fielddata: true + class: + 
type: keyword + + - do: + index: + index: goodbad + id: 1 + body: { text: "good", class: "good" } + - do: + index: + index: goodbad + id: 2 + body: { text: "good", class: "good" } + - do: + index: + index: goodbad + id: 3 + body: { text: "bad", class: "bad" } + - do: + index: + index: goodbad + id: 4 + body: { text: "bad", class: "bad" } + - do: + index: + index: goodbad + id: 5 + body: { text: "good bad", class: "good" } + - do: + index: + index: goodbad + id: 6 + body: { text: "good bad", class: "bad" } + - do: + index: + index: goodbad + id: 7 + body: { text: "bad", class: "bad" } + + + + - do: + indices.refresh: + index: [goodbad] + + - do: + search: + rest_total_hits_as_int: true + index: goodbad + + - match: {hits.total: 7} + + - do: + search: + rest_total_hits_as_int: true + index: goodbad + body: {"aggs": {"class": {"terms": {"field": "class"},"aggs": {"sig_terms": {"significant_terms": {"field": "text"}}}}}} + + - match: {aggregations.class.buckets.0.sig_terms.buckets.0.key: "bad"} + - match: {aggregations.class.buckets.1.sig_terms.buckets.0.key: "good"} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml new file mode 100644 index 0000000000000..bc41f157dfdc4 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml @@ -0,0 +1,155 @@ +--- +"Default index": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: goodbad + body: + settings: + number_of_shards: "1" + mappings: + properties: + text: + type: match_only_text + fielddata: false + class: + type: keyword + + - do: + index: + index: goodbad + id: 1 + body: { text: "good", class: "good" } + - do: + index: + index: goodbad + id: 2 + body: { text: "good", class: "good" } + - do: + index: + index: goodbad + id: 3 + body: { text: "bad", class: "bad" } + - do: + index: + index: goodbad + id: 4 + body: { text: "bad", class: "bad" } + - do: + index: + index: goodbad + id: 5 + body: { text: "good bad", class: "good" } + - do: + index: + index: goodbad + id: 6 + body: { text: "good bad", class: "bad" } + - do: + index: + index: goodbad + id: 7 + body: { text: "bad", class: "bad" } + + + + - do: + indices.refresh: + index: [goodbad] + + - do: + search: + rest_total_hits_as_int: true + index: goodbad + + - match: {hits.total: 7} + + - do: + search: + rest_total_hits_as_int: true + index: goodbad + body: {"aggs": {"class": {"terms": {"field": "class"},"aggs": {"sig_text": {"significant_text": {"field": "text"}}}}}} + + - match: {aggregations.class.buckets.0.sig_text.buckets.0.key: "bad"} + - match: {aggregations.class.buckets.1.sig_text.buckets.0.key: "good"} + +--- +"Dedup noise": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: goodbad + body: + settings: + number_of_shards: "1" + mappings: + properties: + text: + type: match_only_text + fielddata: false + class: + type: keyword + + - do: + index: + index: goodbad + id: 1 + body: { text: "good noisewords1 g1 g2 g3 g4 g5 g6", class: "good" } + - do: + index: + index: goodbad + id: 2 + body: { text: "good noisewords2 g1 g2 g3 g4 g5 g6", class: "good" } + - do: + index: + index: goodbad + id: 3 + body: { text: "bad noisewords3 b1 b2 b3 b4 b5 b6", class: "bad" } + - do: + index: + index: goodbad + id: 4 + body: { text: 
"bad noisewords4 b1 b2 b3 b4 b5 b6", class: "bad" } + - do: + index: + index: goodbad + id: 5 + body: { text: "good bad noisewords5 gb1 gb2 gb3 gb4 gb5 gb6", class: "good" } + - do: + index: + index: goodbad + id: 6 + body: { text: "good bad noisewords6 gb1 gb2 gb3 gb4 gb5 gb6", class: "bad" } + - do: + index: + index: goodbad + id: 7 + body: { text: "bad noisewords7 b1 b2 b3 b4 b5 b6", class: "bad" } + + + + - do: + indices.refresh: + index: [goodbad] + + - do: + search: + rest_total_hits_as_int: true + index: goodbad + + - match: {hits.total: 7} + + - do: + search: + rest_total_hits_as_int: true + index: goodbad + body: {"aggs": {"class": {"terms": {"field": "class"},"aggs": {"sig_text": {"significant_text": {"field": "text", "filter_duplicate_text": true}}}}}} + + - match: {aggregations.class.buckets.0.sig_text.buckets.0.key: "bad"} + - length: { aggregations.class.buckets.0.sig_text.buckets: 1 } + - match: {aggregations.class.buckets.1.sig_text.buckets.0.key: "good"} + - length: { aggregations.class.buckets.1.sig_text.buckets: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml new file mode 100644 index 0000000000000..7100d620bf19e --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml @@ -0,0 +1,137 @@ +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + - do: + indices.create: + index: test + body: + mappings: + _source: + excludes: ["nested.stored_only"] + properties: + nested: + type: nested + properties: + field: + type: text + fields: + vectors: + type: text + term_vector: "with_positions_offsets" + postings: + type: text + index_options: "offsets" + stored: + type: match_only_text + store: true + stored_only: + type: match_only_text + store: true + - do: + index: + index: test + id: 1 + refresh: true + body: + nested: + field : "The quick brown fox is brown." + stored : "The quick brown fox is brown." + stored_only : "The quick brown fox is brown." + +--- +"Unified highlighter": + - do: + search: + index: test + body: + query: + nested: + path: "nested" + query: + multi_match: + query: "quick brown fox" + fields: [ "nested.field", "nested.field.vectors", "nested.field.postings" ] + inner_hits: + highlight: + type: "unified" + fields: + nested.field: {} + nested.field.vectors: {} + nested.field.postings: {} + + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.highlight.nested\.field.0: "The <em>quick</em> <em>brown</em> <em>fox</em> is <em>brown</em>." } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.highlight.nested\.field\.vectors.0: "The <em>quick</em> <em>brown</em> <em>fox</em> is <em>brown</em>." } + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.highlight.nested\.field\.postings.0: "The <em>quick</em> <em>brown</em> <em>fox</em> is <em>brown</em>." } + +--- +"Unified highlighter with stored fields": + - do: + search: + index: test + body: + query: + nested: + path: "nested" + query: + multi_match: + query: "quick brown fox" + fields: [ "nested.stored", "nested.stored_only" ] + inner_hits: + highlight: + type: "unified" + fields: + nested.stored: {} + nested.stored_only: {} + + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.highlight.nested\.stored.0: "The <em>quick</em> <em>brown</em> <em>fox</em> is <em>brown</em>." 
} + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.highlight.nested\.stored_only.0: "The <em>quick</em> <em>brown</em> <em>fox</em> is <em>brown</em>." } + +--- +"Unified highlighter with stored fields and disabled source": + - do: + indices.create: + index: disabled_source + body: + mappings: + _source: + enabled: false + properties: + nested: + type: nested + properties: + field: + type: match_only_text + stored_only: + type: match_only_text + store: true + - do: + index: + index: disabled_source + id: 1 + refresh: true + body: + nested: + field: "The quick brown fox is brown." + stored_only: "The quick brown fox is brown." + + - do: + search: + index: disabled_source + body: + query: + nested: + path: "nested" + query: + multi_match: + query: "quick brown fox" + fields: ["nested.field", "nested.stored_only"] + inner_hits: + highlight: + type: "unified" + fields: + nested.field: {} + nested.stored_only: {} + + - is_false: hits.hits.0.inner_hits.nested.hits.hits.0.highlight.nested\.field + - match: { hits.hits.0.inner_hits.nested.hits.hits.0.highlight.nested\.stored_only.0: "The <em>quick</em> <em>brown</em> <em>fox</em> is <em>brown</em>."} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml new file mode 100644 index 0000000000000..03626236604a1 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml @@ -0,0 +1,119 @@ +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + features: ["headers"] + + - do: + indices.create: + index: test + body: + mappings: + dynamic: false + properties: + match_only_text: + type: match_only_text + + - do: + headers: + Content-Type: application/json + index: + index: "test" + id: 1 + body: + match_only_text: "foo bar" + + - do: + headers: + Content-Type: application/json + index: + index: "test" + id: 2 + body: + match_only_text: "foo bar" + + - do: + headers: + Content-Type: application/json + index: + index: "test" + id: 3 + routing: "route_me" + body: + match_only_text: "foo bar" + + - do: + index: + index: "test" + id: 4 + body: {} + + - do: + indices.create: + index: test-unmapped + body: + mappings: + dynamic: false + properties: + unrelated: + type: keyword + + - do: + index: + index: "test-unmapped" + id: 1 + body: + unrelated: "foo" + + - do: + indices.create: + index: test-empty + body: + mappings: + dynamic: false + properties: + match_only_text: + type: match_only_text + + - do: + indices.refresh: + index: [test, test-unmapped, test-empty] + +--- +"Test exists query on mapped match_only_text field": + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + exists: + field: match_only_text + + - match: {hits.total: 3} + +--- +"Test exists query on unmapped match_only_text field": + - do: + search: + rest_total_hits_as_int: true + index: test-unmapped + body: + query: + exists: + field: match_only_text + + - match: {hits.total: 0} + +--- +"Test exists query on match_only_text field in empty index": + - do: + search: + rest_total_hits_as_int: true + index: test-empty + body: + query: + exists: + field: match_only_text + + - match: {hits.total: 0} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml new file 
mode 100644 index 0000000000000..a41b8d353e3e9 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml @@ -0,0 +1,67 @@ +--- +"search with indexed phrases": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + + - do: + indices.create: + index: test + body: + mappings: + properties: + text: + type: match_only_text + + - do: + index: + index: test + id: 1 + body: { text: "peter piper picked a peck of pickled peppers" } + + - do: + indices.refresh: + index: [test] + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_phrase: + text: + query: "peter piper" + + - match: {hits.total: 1} + + - do: + search: + rest_total_hits_as_int: true + index: test + q: '"peter piper"~1' + df: text + + - match: {hits.total: 1} + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_phrase: + text: "peter piper picked" + + - match: {hits.total: 1} + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_phrase: + text: "piper" + + - match: {hits.total: 1} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml new file mode 100644 index 0000000000000..fc4e9f9de0f38 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml @@ -0,0 +1,282 @@ +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + + - do: + indices.create: + index: test + body: + mappings: + properties: + my_field1: + type: match_only_text + my_field2: + type: match_only_text + + - do: + index: + index: test + id: 1 + body: + my_field1: "brown fox jump" + my_field2: "xylophone" + + - do: + index: + index: test + id: 2 + body: + my_field1: "brown emu jump" + my_field2: "xylophone" + + - do: + index: + index: test + id: 3 + body: + my_field1: "jumparound" + my_field2: "emu" + + - do: + index: + index: test + id: 4 + body: + my_field1: "dog" + my_field2: "brown fox jump lazy" + + - do: + indices.refresh: {} + +--- +"minimum should match": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_bool_prefix: + my_field1: + query: "brown fox jump" + minimum_should_match: 3 + + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._source.my_field1: "brown fox jump" } + +--- +"analyzer": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_bool_prefix: + my_field1: + query: "BROWN dog" + analyzer: whitespace # this analyzer doesn't lowercase terms + + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._source.my_field1: "dog" } + +--- +"operator": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_bool_prefix: + my_field1: + query: "brown fox jump" + operator: AND + + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._source.my_field1: "brown fox jump" } + +--- +"fuzziness": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + match_bool_prefix: + my_field2: + query: "xylophoen foo" + fuzziness: 1 + prefix_length: 1 + max_expansions: 10 + fuzzy_transpositions: true + fuzzy_rewrite: constant_score + + - match: { hits.total: 2 } + - match: { 
hits.hits.0._source.my_field2: "xylophone" } + - match: { hits.hits.1._source.my_field2: "xylophone" } + +--- +"multi_match single field complete term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox jump" + type: bool_prefix + fields: [ "my_field1" ] + + - match: { hits.total: 3 } + +--- +"multi_match single field partial term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox ju" + type: bool_prefix + fields: [ "my_field1" ] + + - match: { hits.total: 3 } + +--- +"multi_match multiple fields complete term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox jump lazy" + type: bool_prefix + fields: [ "my_field1", "my_field2" ] + + - match: { hits.total: 3 } + +--- +"multi_match multiple fields partial term": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox jump laz" + type: bool_prefix + fields: [ "my_field1", "my_field2" ] + + - match: { hits.total: 3 } + +--- +"multi_match multiple fields with analyzer": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "BROWN FOX JUMP dog" + type: bool_prefix + fields: [ "my_field1", "my_field2" ] + analyzer: whitespace # this analyzer doesn't lowercase terms + + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._source.my_field1: "dog" } + - match: { hits.hits.0._source.my_field2: "brown fox jump lazy" } + +--- +"multi_match multiple fields with minimum_should_match": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown fox jump la" + type: bool_prefix + fields: [ "my_field1", "my_field2" ] + minimum_should_match: 4 + + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._source.my_field1: "dog" } + - match: { hits.hits.0._source.my_field2: "brown fox jump lazy" } + +--- +"multi_match multiple fields with fuzziness": + + - do: + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "dob nomatch" + type: bool_prefix + fields: [ "my_field1", "my_field2" ] + fuzziness: 1 + + - match: { hits.total: 1 } + - match: { hits.hits.0._id: "4" } + - match: { hits.hits.0._source.my_field1: "dog" } + - match: { hits.hits.0._source.my_field2: "brown fox jump lazy" } + +--- +"multi_match multiple fields with slop throws exception": + + - do: + catch: /\[slop\] not allowed for type \[bool_prefix\]/ + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown" + type: bool_prefix + fields: [ "my_field1", "my_field2" ] + slop: 1 + +--- +"multi_match multiple fields with cutoff_frequency throws exception": + + - do: + catch: /\[cutoff_frequency\] not allowed for type \[bool_prefix\]/ + search: + rest_total_hits_as_int: true + index: test + body: + query: + multi_match: + query: "brown" + type: bool_prefix + fields: [ "my_field1", "my_field2" ] + cutoff_frequency: 0.001 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml new file mode 100644 index 0000000000000..f4faf87eb83cc --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml @@ -0,0 +1,141 @@ +--- +setup: + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + + - do: + indices.create: + index: test + body: + mappings: + properties: + text: + type: match_only_text + analyzer: standard + fields: + raw: + type: keyword + nested1: + type: nested + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "test", "_id": "1"}}' + - '{"text" : "Some like it hot, some like it cold", "nested1": [{"foo": "bar1"}]}' + - '{"index": {"_index": "test", "_id": "2"}}' + - '{"text" : "Its cold outside, theres no kind of atmosphere", "nested1": [{"foo": "bar2"}]}' + - '{"index": {"_index": "test", "_id": "3"}}' + - '{"text" : "Baby its cold there outside", "nested1": [{"foo": "bar3"}]}' + - '{"index": {"_index": "test", "_id": "4"}}' + - '{"text" : "Outside it is cold and wet", "nested1": [{"foo": "bar4"}]}' + +--- +teardown: + + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: null + +--- +"Test disallow expensive queries": + + ### Check for initial setting = null -> false + - do: + cluster.get_settings: + flat_settings: true + + - is_false: search.allow_expensive_queries + + ### Update setting to false + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: "false" + flat_settings: true + + - match: {transient: {search.allow_expensive_queries: "false"}} + + ### Prefix + - do: + catch: /\[prefix\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false. For optimised prefix queries on text fields please enable \[index_prefixes\]./ + search: + index: test + body: + query: + prefix: + text: + value: out + + ### Fuzzy + - do: + catch: /\[fuzzy\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + fuzzy: + text: + value: outwide + + ### Regexp + - do: + catch: /\[regexp\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + regexp: + text: + value: .*ou.*id.* + + ### Wildcard + - do: + catch: /\[wildcard\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + wildcard: + text: + value: out?ide + + ### Range on text + - do: + catch: /\[range\] queries on \[text\] or \[keyword\] fields cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + range: + text: + gte: "theres" + + ### Range on keyword + - do: + catch: /\[range\] queries on \[text\] or \[keyword\] fields cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + range: + text.raw: + gte : "Outside it is cold and wet" + + ### Nested + - do: + catch: /\[joining\] queries cannot be executed when \'search.allow_expensive_queries\' is set to false./ + search: + index: test + body: + query: + nested: + path: "nested1" + query: + bool: + must: [{"match" : {"nested1.foo" : "bar2"}}] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml new file mode 100644 index 0000000000000..cc15796e4697f --- /dev/null +++ 
b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml @@ -0,0 +1,92 @@ +--- +"Search shards aliases with and without filters": + - skip: + version: " - 2.99.99" + reason: "match_only_text was added in 3.0" + + - do: + indices.create: + index: test_index + body: + settings: + index: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + field: + type: match_only_text + aliases: + test_alias_no_filter: {} + test_alias_filter_1: + filter: + term: + field : value1 + test_alias_filter_2: + filter: + term: + field : value2 + + - do: + search_shards: + index: test_alias_no_filter + + - length: { shards: 1 } + - match: { shards.0.0.index: test_index } + - is_true: indices.test_index + - is_false: indices.test_index.filter + - match: { indices.test_index.aliases: [test_alias_no_filter]} + + - do: + search_shards: + index: test_alias_filter_1 + + - length: { shards: 1 } + - match: { shards.0.0.index: test_index } + - match: { indices.test_index.aliases: [test_alias_filter_1] } + - match: { indices.test_index.filter.term.field.value: value1 } + - lte: { indices.test_index.filter.term.field.boost: 1.0 } + - gte: { indices.test_index.filter.term.field.boost: 1.0 } + + - do: + search_shards: + index: ["test_alias_filter_1","test_alias_filter_2"] + + - length: { shards: 1 } + - match: { shards.0.0.index: test_index } + - match: { indices.test_index.aliases: [test_alias_filter_1, test_alias_filter_2]} + - length: { indices.test_index.filter.bool.should: 2 } + - lte: { indices.test_index.filter.bool.should.0.term.field.boost: 1.0 } + - gte: { indices.test_index.filter.bool.should.0.term.field.boost: 1.0 } + - lte: { indices.test_index.filter.bool.should.1.term.field.boost: 1.0 } + - gte: { indices.test_index.filter.bool.should.1.term.field.boost: 1.0 } + - match: { indices.test_index.filter.bool.adjust_pure_negative: true} + - lte: { indices.test_index.filter.bool.boost: 1.0 } + - gte: { indices.test_index.filter.bool.boost: 1.0 } + + - do: + search_shards: + index: "test*" + + - length: { shards: 1 } + - match: { shards.0.0.index: test_index } + - match: { indices.test_index.aliases: [test_alias_filter_1, test_alias_filter_2, test_alias_no_filter]} + - is_false: indices.test_index.filter + + - do: + search_shards: + index: ["test_alias_filter_1","test_alias_no_filter"] + + - length: { shards: 1 } + - match: { shards.0.0.index: test_index } + - match: { indices.test_index.aliases: [test_alias_filter_1, test_alias_no_filter]} + - is_false: indices.test_index.filter + + - do: + search_shards: + index: ["test_alias_no_filter"] + + - length: { shards: 1 } + - match: { shards.0.0.index: test_index } + - match: { indices.test_index.aliases: [test_alias_no_filter]} + - is_false: indices.test_index.filter diff --git a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java index da62ddfd7017d..66d4654e543a2 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java +++ b/server/src/main/java/org/opensearch/index/mapper/MappedFieldType.java @@ -359,18 +359,31 @@ public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionInc ); } + public Query phraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, QueryShardContext context) throws IOException { + return phraseQuery(stream, slop, enablePositionIncrements); + } + public Query multiPhraseQuery(TokenStream stream, int slop, boolean 
enablePositionIncrements) throws IOException { throw new IllegalArgumentException( "Can only use phrase queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); } + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, QueryShardContext context) + throws IOException { + return multiPhraseQuery(stream, slop, enablePositionIncrements); + } + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions) throws IOException { throw new IllegalArgumentException( "Can only use phrase prefix queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); } + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, QueryShardContext context) throws IOException { + return phrasePrefixQuery(stream, slop, maxExpansions); + } + public SpanQuery spanPrefixQuery(String value, SpanMultiTermQueryWrapper.SpanRewriteMethod method, QueryShardContext context) { throw new IllegalArgumentException( "Can only use span prefix queries on text fields - not on [" + name + "] which is of type [" + typeName() + "]" ); } diff --git a/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java new file mode 100644 index 0000000000000..fb97f8c309a70 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapper.java @@ -0,0 +1,312 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.mapper; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute; +import org.apache.lucene.document.FieldType; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.opensearch.Version; +import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; +import org.opensearch.index.analysis.IndexAnalyzers; +import org.opensearch.index.analysis.NamedAnalyzer; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.SourceFieldMatchQuery; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.function.Function; + +/** + * A specialized type of TextFieldMapper which disables positions and norms to save on storage and executes phrase queries, which require + * positional data, in a slightly less efficient manner using the {@link org.opensearch.index.query.SourceFieldMatchQuery}.
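+ * <p>
+ * An illustrative sketch of that rewrite (conceptual, not additional API surface): a phrase such as {@code "quick brown"}
+ * is first approximated by a positions-free conjunction of term filters ({@code +quick +brown}), and
+ * {@link org.opensearch.index.query.SourceFieldMatchQuery} then re-verifies the positional phrase constraint against each
+ * candidate document's {@code _source} before the document is returned as a hit.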
+ */ +public class MatchOnlyTextFieldMapper extends TextFieldMapper { + + public static final FieldType FIELD_TYPE = new FieldType(); + public static final String CONTENT_TYPE = "match_only_text"; + private final String indexOptions = FieldMapper.indexOptionToString(FIELD_TYPE.indexOptions()); + private final boolean norms = FIELD_TYPE.omitNorms() == false; + + @Override + protected String contentType() { + return CONTENT_TYPE; + } + + static { + FIELD_TYPE.setTokenized(true); + FIELD_TYPE.setStored(false); + FIELD_TYPE.setStoreTermVectors(false); + FIELD_TYPE.setOmitNorms(true); + FIELD_TYPE.setIndexOptions(IndexOptions.DOCS); + FIELD_TYPE.freeze(); + } + + public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, c.indexVersionCreated(), c.getIndexAnalyzers())); + + protected MatchOnlyTextFieldMapper( + String simpleName, + FieldType fieldType, + MatchOnlyTextFieldType mappedFieldType, + TextFieldMapper.PrefixFieldMapper prefixFieldMapper, + TextFieldMapper.PhraseFieldMapper phraseFieldMapper, + MultiFields multiFields, + CopyTo copyTo, + Builder builder + ) { + + super(simpleName, fieldType, mappedFieldType, prefixFieldMapper, phraseFieldMapper, multiFields, copyTo, builder); + } + + @Override + public ParametrizedFieldMapper.Builder getMergeBuilder() { + return new Builder(simpleName(), this.indexCreatedVersion, this.indexAnalyzers).init(this); + } + + /** + * Builder class for constructing the MatchOnlyTextFieldMapper. + */ + public static class Builder extends TextFieldMapper.Builder { + final Parameter<String> indexOptions = indexOptions(m -> ((MatchOnlyTextFieldMapper) m).indexOptions); + + private static Parameter<String> indexOptions(Function<FieldMapper, String> initializer) { + return Parameter.restrictedStringParam("index_options", false, initializer, "docs"); + } + + final Parameter<Boolean> norms = norms(m -> ((MatchOnlyTextFieldMapper) m).norms); + final Parameter<Boolean> indexPhrases = Parameter.boolParam( + "index_phrases", + false, + m -> ((MatchOnlyTextFieldType) m.mappedFieldType).indexPhrases, + false + ).setValidator(v -> { + if (v == true) { + throw new MapperParsingException("Index phrases cannot be enabled for match_only_text field. Use text field instead"); + } + }); + + final Parameter<PrefixConfig> indexPrefixes = new Parameter<>( + "index_prefixes", + false, + () -> null, + TextFieldMapper::parsePrefixConfig, + m -> Optional.ofNullable(((MatchOnlyTextFieldType) m.mappedFieldType).prefixFieldType) + .map(p -> new PrefixConfig(p.minChars, p.maxChars)) + .orElse(null) + ).acceptsNull().setValidator(v -> { + if (v != null) { + throw new MapperParsingException("Index prefixes cannot be enabled for match_only_text field. Use text field instead"); + } + }); + + private static Parameter<Boolean> norms(Function<FieldMapper, Boolean> initializer) { + return Parameter.boolParam("norms", false, initializer, false) + .setMergeValidator((o, n) -> o == n || (o && n == false)) + .setValidator(v -> { + if (v == true) { + throw new MapperParsingException("Norms cannot be enabled for match_only_text field"); + } + }); + } + + public Builder(String name, IndexAnalyzers indexAnalyzers) { + super(name, indexAnalyzers); + } + + public Builder(String name, Version indexCreatedVersion, IndexAnalyzers indexAnalyzers) { + super(name, indexCreatedVersion, indexAnalyzers); + } + + @Override + public MatchOnlyTextFieldMapper build(BuilderContext context) { + FieldType fieldType = TextParams.buildFieldType(index, store, indexOptions, norms, termVectors); + MatchOnlyTextFieldType tft = buildFieldType(fieldType, context); + return new MatchOnlyTextFieldMapper( + name, + fieldType, + tft, + buildPrefixMapper(context, fieldType, tft), + buildPhraseMapper(fieldType, tft), + multiFieldsBuilder.build(this, context), + copyTo.build(), + this + ); + } + + @Override + protected MatchOnlyTextFieldType buildFieldType(FieldType fieldType, BuilderContext context) { + NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer(); + NamedAnalyzer searchAnalyzer = analyzers.getSearchAnalyzer(); + NamedAnalyzer searchQuoteAnalyzer = analyzers.getSearchQuoteAnalyzer(); + + if (fieldType.indexOptions().compareTo(IndexOptions.DOCS) > 0) { + throw new IllegalArgumentException("Cannot set position_increment_gap on field [" + name + "] without positions enabled"); + } + if (positionIncrementGap.get() != POSITION_INCREMENT_GAP_USE_ANALYZER) { + if (fieldType.indexOptions().compareTo(IndexOptions.DOCS) < 0) { + throw new IllegalArgumentException( + "Cannot set position_increment_gap on field [" + name + "] without indexing enabled" + ); + } + indexAnalyzer = new NamedAnalyzer(indexAnalyzer, positionIncrementGap.get()); + searchAnalyzer = new NamedAnalyzer(searchAnalyzer, positionIncrementGap.get()); + searchQuoteAnalyzer = new NamedAnalyzer(searchQuoteAnalyzer, positionIncrementGap.get()); + } + TextSearchInfo tsi = new TextSearchInfo(fieldType, similarity.getValue(), searchAnalyzer, searchQuoteAnalyzer); + MatchOnlyTextFieldType ft = new MatchOnlyTextFieldType( + buildFullName(context), + index.getValue(), + fieldType.stored(), + tsi, + meta.getValue() + ); + ft.setIndexAnalyzer(indexAnalyzer); + ft.setEagerGlobalOrdinals(eagerGlobalOrdinals.getValue()); + ft.setBoost(boost.getValue()); + if (fieldData.getValue()) { + ft.setFielddata(true, freqFilter.getValue()); + } + return ft; + } + + @Override + protected List<Parameter<?>> getParameters() { + return Arrays.asList( + index, + store, + indexOptions, + norms, + termVectors, + analyzers.indexAnalyzer, + analyzers.searchAnalyzer, + analyzers.searchQuoteAnalyzer, + similarity, + positionIncrementGap, + fieldData, + freqFilter, + eagerGlobalOrdinals, + indexPhrases, + indexPrefixes, + boost, + meta + ); + } + } + + /** + * The specific field type for MatchOnlyTextFieldMapper + * + * @opensearch.internal + */ + public static final class MatchOnlyTextFieldType extends TextFieldType { + private final boolean indexPhrases = false; + + private PrefixFieldType prefixFieldType; + + @Override + public String typeName() { + return CONTENT_TYPE; + } + + public MatchOnlyTextFieldType(String name, boolean indexed, boolean stored, TextSearchInfo tsi, Map<String, String> meta) { + super(name, indexed, stored, tsi, meta);
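+            // Positions and norms are already dropped via the Lucene FieldType configured by the enclosing
+            // mapper (see the static FIELD_TYPE initializer), so this field type only needs to override the
+            // positional query family below, routing each query through SourceFieldMatchQuery to re-verify
+            // positional constraints against the document _source.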
+ } + + @Override + public Query phraseQuery(TokenStream stream, int slop, boolean enablePosIncrements, QueryShardContext context) throws IOException { + PhraseQuery phraseQuery = (PhraseQuery) super.phraseQuery(stream, slop, enablePosIncrements); + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + for (Term term : phraseQuery.getTerms()) { + builder.add(new TermQuery(term), BooleanClause.Occur.FILTER); + } + return new SourceFieldMatchQuery(builder.build(), phraseQuery, this, context); + } + + @Override + public Query multiPhraseQuery(TokenStream stream, int slop, boolean enablePositionIncrements, QueryShardContext context) + throws IOException { + MultiPhraseQuery multiPhraseQuery = (MultiPhraseQuery) super.multiPhraseQuery(stream, slop, enablePositionIncrements); + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + for (Term[] terms : multiPhraseQuery.getTermArrays()) { + if (terms.length > 1) { + // Multiple terms in the same position, creating a disjunction query for it and + // adding it to conjunction query + BooleanQuery.Builder disjunctions = new BooleanQuery.Builder(); + for (Term term : terms) { + disjunctions.add(new TermQuery(term), BooleanClause.Occur.SHOULD); + } + builder.add(disjunctions.build(), BooleanClause.Occur.FILTER); + } else { + builder.add(new TermQuery(terms[0]), BooleanClause.Occur.FILTER); + } + } + return new SourceFieldMatchQuery(builder.build(), multiPhraseQuery, this, context); + } + + @Override + public Query phrasePrefixQuery(TokenStream stream, int slop, int maxExpansions, QueryShardContext context) throws IOException { + Query phrasePrefixQuery = super.phrasePrefixQuery(stream, slop, maxExpansions); + List<List<Term>> termArray = getTermsFromTokenStream(stream); + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + for (int i = 0; i < termArray.size(); i++) { + if (i == termArray.size() - 1) { + // last element of the term Array is a prefix, thus creating a prefix query for it and adding it to + // conjunction query + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery(name()); + mqb.add(termArray.get(i).toArray(new Term[0])); + builder.add(mqb, BooleanClause.Occur.FILTER); + } else { + if (termArray.get(i).size() > 1) { + // multiple terms in the same position, creating a disjunction query for it and + // adding it to conjunction query + BooleanQuery.Builder disjunctions = new BooleanQuery.Builder(); + for (Term term : termArray.get(i)) { + disjunctions.add(new TermQuery(term), BooleanClause.Occur.SHOULD); + } + builder.add(disjunctions.build(), BooleanClause.Occur.FILTER); + } else { + builder.add(new TermQuery(termArray.get(i).get(0)), BooleanClause.Occur.FILTER); + } + } + } + return new SourceFieldMatchQuery(builder.build(), phrasePrefixQuery, this, context); + } + + private List<List<Term>> getTermsFromTokenStream(TokenStream stream) throws IOException { + final List<List<Term>> termArray = new ArrayList<>(); + TermToBytesRefAttribute termAtt = stream.getAttribute(TermToBytesRefAttribute.class); + PositionIncrementAttribute posIncrAtt = stream.getAttribute(PositionIncrementAttribute.class); + List<Term> currentTerms = new ArrayList<>(); + stream.reset(); + while (stream.incrementToken()) { + if (posIncrAtt.getPositionIncrement() != 0) { + if (currentTerms.isEmpty() == false) { + termArray.add(List.copyOf(currentTerms)); + } + currentTerms.clear(); + } + currentTerms.add(new Term(name(), termAtt.getBytesRef())); + } + termArray.add(List.copyOf(currentTerms)); + return termArray; + } + } +} diff --git 
a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java index 1d0d1ae2bd899..d0e041e68a81d 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java @@ -110,7 +110,7 @@ public class TextFieldMapper extends ParametrizedFieldMapper { public static final String CONTENT_TYPE = "text"; - private static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; + protected static final int POSITION_INCREMENT_GAP_USE_ANALYZER = -1; private static final String FAST_PHRASE_SUFFIX = "._index_phrase"; /** @@ -152,11 +152,11 @@ private static TextFieldMapper toType(FieldMapper in) { * * @opensearch.internal */ - private static final class PrefixConfig implements ToXContent { + protected static final class PrefixConfig implements ToXContent { final int minChars; final int maxChars; - private PrefixConfig(int minChars, int maxChars) { + PrefixConfig(int minChars, int maxChars) { this.minChars = minChars; this.maxChars = maxChars; if (minChars > maxChars) { @@ -198,7 +198,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - private static PrefixConfig parsePrefixConfig(String propName, ParserContext parserContext, Object propNode) { + static PrefixConfig parsePrefixConfig(String propName, ParserContext parserContext, Object propNode) { if (propNode == null) { return null; } @@ -214,7 +214,7 @@ private static PrefixConfig parsePrefixConfig(String propName, ParserContext par * * @opensearch.internal */ - private static final class FielddataFrequencyFilter implements ToXContent { + protected static final class FielddataFrequencyFilter implements ToXContent { final double minFreq; final double maxFreq; final int minSegmentSize; @@ -280,15 +280,14 @@ public static class Builder extends ParametrizedFieldMapper.Builder { private final Version indexCreatedVersion; - private final Parameter<Boolean> index = Parameter.indexParam(m -> toType(m).mappedFieldType.isSearchable(), true); - private final Parameter<Boolean> store = Parameter.storeParam(m -> toType(m).fieldType.stored(), false); + protected final Parameter<Boolean> index = Parameter.indexParam(m -> toType(m).mappedFieldType.isSearchable(), true); + protected final Parameter<Boolean> store = Parameter.storeParam(m -> toType(m).fieldType.stored(), false); final Parameter<SimilarityProvider> similarity = TextParams.similarity(m -> toType(m).similarity); final Parameter<String> indexOptions = TextParams.indexOptions(m -> toType(m).indexOptions); final Parameter<Boolean> norms = TextParams.norms(true, m -> toType(m).fieldType.omitNorms() == false); final Parameter<String> termVectors = TextParams.termVectors(m -> toType(m).termVectors); - final Parameter<Integer> positionIncrementGap = Parameter.intParam( "position_increment_gap", false, @@ -332,8 +331,8 @@ public static class Builder extends ParametrizedFieldMapper.Builder { .orElse(null) ).acceptsNull(); - private final Parameter<Float> boost = Parameter.boostParam(); - private final Parameter<Map<String, String>> meta = Parameter.metaParam(); + protected final Parameter<Float> boost = Parameter.boostParam(); + protected final Parameter<Map<String, String>> meta = Parameter.metaParam(); final TextParams.Analyzers analyzers; @@ -395,7 +394,7 @@ protected List<Parameter<?>> getParameters() { ); } - private TextFieldType buildFieldType(FieldType fieldType, BuilderContext context) { + protected 
TextFieldType buildFieldType(FieldType fieldType, BuilderContext context) { NamedAnalyzer indexAnalyzer = analyzers.getIndexAnalyzer(); NamedAnalyzer searchAnalyzer = analyzers.getSearchAnalyzer(); NamedAnalyzer searchQuoteAnalyzer = analyzers.getSearchQuoteAnalyzer(); @@ -420,7 +419,7 @@ private TextFieldType buildFieldType(FieldType fieldType, BuilderContext context return ft; } - private PrefixFieldMapper buildPrefixMapper(BuilderContext context, FieldType fieldType, TextFieldType tft) { + protected PrefixFieldMapper buildPrefixMapper(BuilderContext context, FieldType fieldType, TextFieldType tft) { if (indexPrefixes.get() == null) { return null; } @@ -454,7 +453,7 @@ private PrefixFieldMapper buildPrefixMapper(BuilderContext context, FieldType fi return new PrefixFieldMapper(pft, prefixFieldType); } - private PhraseFieldMapper buildPhraseMapper(FieldType fieldType, TextFieldType parent) { + protected PhraseFieldMapper buildPhraseMapper(FieldType fieldType, TextFieldType parent) { if (indexPhrases.get() == false) { return null; } @@ -683,7 +682,7 @@ public Query existsQuery(QueryShardContext context) { * * @opensearch.internal */ - private static final class PhraseFieldMapper extends FieldMapper { + protected static final class PhraseFieldMapper extends FieldMapper { PhraseFieldMapper(FieldType fieldType, PhraseFieldType mappedFieldType) { super(mappedFieldType.name(), fieldType, mappedFieldType, MultiFields.empty(), CopyTo.empty()); @@ -710,7 +709,7 @@ protected String contentType() { * * @opensearch.internal */ - private static final class PrefixFieldMapper extends FieldMapper { + protected static final class PrefixFieldMapper extends FieldMapper { protected PrefixFieldMapper(FieldType fieldType, PrefixFieldType mappedFieldType) { super(mappedFieldType.name(), fieldType, mappedFieldType, MultiFields.empty(), CopyTo.empty()); @@ -968,15 +967,15 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S } - private final FieldType fieldType; + protected final FieldType fieldType; private final PrefixFieldMapper prefixFieldMapper; private final PhraseFieldMapper phraseFieldMapper; private final SimilarityProvider similarity; private final String indexOptions; private final String termVectors; private final int positionIncrementGap; - private final Version indexCreatedVersion; - private final IndexAnalyzers indexAnalyzers; + protected final Version indexCreatedVersion; + protected final IndexAnalyzers indexAnalyzers; private final FielddataFrequencyFilter freqFilter; protected TextFieldMapper( diff --git a/server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java b/server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java new file mode 100644 index 0000000000000..b0be20e417efe --- /dev/null +++ b/server/src/main/java/org/opensearch/index/query/SourceFieldMatchQuery.java @@ -0,0 +1,160 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.query; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.memory.MemoryIndex; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; +import org.apache.lucene.search.Weight; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.SourceValueFetcher; +import org.opensearch.search.lookup.LeafSearchLookup; +import org.opensearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/** + * A query that matches against each document from the parent query by filtering using the source field values. + * Useful to query against field type which doesn't store positional data and field is not stored/computed dynamically. + */ +public class SourceFieldMatchQuery extends Query { + private final Query delegateQuery; + private final Query filter; + private final SearchLookup lookup; + private final MappedFieldType fieldType; + private final SourceValueFetcher valueFetcher; + private final QueryShardContext context; + + /** + * Constructs a SourceFieldMatchQuery. + * + * @param delegateQuery The parent query to use to find matches. + * @param filter The query used to filter further by running against field value fetched using _source field. + * @param fieldType The mapped field type. + * @param context The QueryShardContext to get lookup and valueFetcher + */ + public SourceFieldMatchQuery(Query delegateQuery, Query filter, MappedFieldType fieldType, QueryShardContext context) { + this.delegateQuery = delegateQuery; + this.filter = filter; + this.fieldType = fieldType; + this.context = context; + this.lookup = context.lookup(); + if (!context.documentMapper("").sourceMapper().enabled()) { + throw new IllegalArgumentException( + "SourceFieldMatchQuery error: unable to fetch fields from _source field: _source is disabled in the mappings " + + "for index [" + + context.index().getName() + + "]" + ); + } + this.valueFetcher = (SourceValueFetcher) fieldType.valueFetcher(context, lookup, null); + } + + @Override + public void visit(QueryVisitor visitor) { + delegateQuery.visit(visitor); + } + + @Override + public Query rewrite(IndexSearcher indexSearcher) throws IOException { + Query rewritten = indexSearcher.rewrite(delegateQuery); + if (rewritten == delegateQuery) { + return this; + } + return new SourceFieldMatchQuery(rewritten, filter, fieldType, context); + } + + @Override + public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { + + Weight weight = delegateQuery.createWeight(searcher, ScoreMode.TOP_DOCS, boost); + + return new ConstantScoreWeight(this, boost) { + + @Override + public Scorer scorer(LeafReaderContext context) throws IOException { + + Scorer scorer = weight.scorer(context); + if (scorer == null) { + // none of the docs are matching + return null; + } + DocIdSetIterator approximation = scorer.iterator(); + LeafSearchLookup leafSearchLookup = lookup.getLeafSearchLookup(context); + TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { + + @Override + public boolean matches() { + 
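// Second phase (added comment): confirm the candidate doc by fetching its _source values
+ // and re-running the positional filter against a transient Lucene MemoryIndex, roughly:
+ //   MemoryIndex mi = new MemoryIndex();
+ //   mi.addField(fieldType.name(), "two words", fieldType.indexAnalyzer());
+ //   boolean matched = mi.search(filter) > 0.0f;
+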
leafSearchLookup.setDocument(approximation.docID()); + List<Object> values = valueFetcher.fetchValues(leafSearchLookup.source()); + // Missing fields won't count as match. Can we use a default value for missing field? + if (values.isEmpty()) { + return false; + } + MemoryIndex memoryIndex = new MemoryIndex(); + for (Object value : values) { + memoryIndex.addField(fieldType.name(), (String) value, fieldType.indexAnalyzer()); + } + float score = memoryIndex.search(filter); + return score > 0.0f; + } + + @Override + public float matchCost() { + // arbitrary cost + return 1000f; + } + }; + return new ConstantScoreScorer(this, score(), ScoreMode.TOP_DOCS, twoPhase); + } + + @Override + public boolean isCacheable(LeafReaderContext ctx) { + // It is fine to cache if delegate query weight is cacheable since additional logic here + // is just a filter on top of delegate query matches + return weight.isCacheable(ctx); + } + }; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (sameClassAs(o) == false) { + return false; + } + SourceFieldMatchQuery other = (SourceFieldMatchQuery) o; + return Objects.equals(this.delegateQuery, other.delegateQuery) + && Objects.equals(this.filter, other.filter) + && Objects.equals(this.fieldType, other.fieldType) + && Objects.equals(this.context, other.context); + } + + @Override + public int hashCode() { + return Objects.hash(classHash(), delegateQuery, filter, fieldType, context); + } + + @Override + public String toString(String f) { + return "SourceFieldMatchQuery (delegate query: [ " + delegateQuery.toString(f) + " ], filter query: [ " + filter.toString(f) + "])"; + } +} diff --git a/server/src/main/java/org/opensearch/index/search/MatchQuery.java b/server/src/main/java/org/opensearch/index/search/MatchQuery.java index 9e2b79971369d..ec6755ea25703 100644 --- a/server/src/main/java/org/opensearch/index/search/MatchQuery.java +++ b/server/src/main/java/org/opensearch/index/search/MatchQuery.java @@ -67,6 +67,7 @@ import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.index.mapper.KeywordFieldMapper; import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.MatchOnlyTextFieldMapper; import org.opensearch.index.mapper.TextFieldMapper; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.query.support.QueryParsers; @@ -701,7 +702,7 @@ private Query analyzeMultiBoolean(String field, TokenStream stream, BooleanClaus protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { try { checkForPositions(field); - return fieldType.phraseQuery(stream, slop, enablePositionIncrements); + return fieldType.phraseQuery(stream, slop, enablePositionIncrements, context); } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { return newLenientFieldQuery(field, e); @@ -714,7 +715,7 @@ protected Query analyzePhrase(String field, TokenStream stream, int slop) throws protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { try { checkForPositions(field); - return fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements); + return fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements, context); } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { return newLenientFieldQuery(field, e); @@ -728,7 +729,7 @@ private Query analyzePhrasePrefix(String field, TokenStream stream, int slop, in if (positionCount > 1) { 
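// Added note: a prefix over a single analyzed position can be resolved without positional
// data, so the positions check applies only to streams spanning more than one position.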
checkForPositions(field); } - return fieldType.phrasePrefixQuery(stream, slop, maxExpansions); + return fieldType.phrasePrefixQuery(stream, slop, maxExpansions, context); } catch (IllegalArgumentException | IllegalStateException e) { if (lenient) { return newLenientFieldQuery(field, e); @@ -887,6 +888,9 @@ private Query analyzeGraphPhrase(TokenStream source, String field, Type type, in private void checkForPositions(String field) { if (fieldType.getTextSearchInfo().hasPositions() == false) { + if (fieldType instanceof MatchOnlyTextFieldMapper.MatchOnlyTextFieldType) { + return; + } throw new IllegalStateException("field:[" + field + "] was indexed without position data; cannot run PhraseQuery"); } } diff --git a/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java b/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java index 241f05af2c512..8c0c87e8c9d0c 100644 --- a/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java +++ b/server/src/main/java/org/opensearch/index/search/MultiMatchQuery.java @@ -248,7 +248,7 @@ protected Query newPrefixQuery(Term term) { protected Query analyzePhrase(String field, TokenStream stream, int slop) throws IOException { List<Query> disjunctions = new ArrayList<>(); for (FieldAndBoost fieldType : blendedFields) { - Query query = fieldType.fieldType.phraseQuery(stream, slop, enablePositionIncrements); + Query query = fieldType.fieldType.phraseQuery(stream, slop, enablePositionIncrements, context); if (fieldType.boost != 1f) { query = new BoostQuery(query, fieldType.boost); } @@ -261,7 +261,7 @@ protected Query analyzePhrase(String field, TokenStream stream, int slop) throws protected Query analyzeMultiPhrase(String field, TokenStream stream, int slop) throws IOException { List<Query> disjunctions = new ArrayList<>(); for (FieldAndBoost fieldType : blendedFields) { - Query query = fieldType.fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements); + Query query = fieldType.fieldType.multiPhraseQuery(stream, slop, enablePositionIncrements, context); if (fieldType.boost != 1f) { query = new BoostQuery(query, fieldType.boost); } diff --git a/server/src/main/java/org/opensearch/indices/IndicesModule.java b/server/src/main/java/org/opensearch/indices/IndicesModule.java index 5c2137ec742a4..eea5dbbf57f6c 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesModule.java +++ b/server/src/main/java/org/opensearch/indices/IndicesModule.java @@ -59,6 +59,7 @@ import org.opensearch.index.mapper.IpFieldMapper; import org.opensearch.index.mapper.KeywordFieldMapper; import org.opensearch.index.mapper.Mapper; +import org.opensearch.index.mapper.MatchOnlyTextFieldMapper; import org.opensearch.index.mapper.MetadataFieldMapper; import org.opensearch.index.mapper.NestedPathFieldMapper; import org.opensearch.index.mapper.NumberFieldMapper; @@ -158,6 +159,7 @@ public static Map<String, Mapper.TypeParser> getMappers(List<MapperPlugin> mappe mappers.put(nanoseconds.type(), DateFieldMapper.NANOS_PARSER); mappers.put(IpFieldMapper.CONTENT_TYPE, IpFieldMapper.PARSER); mappers.put(TextFieldMapper.CONTENT_TYPE, TextFieldMapper.PARSER); + mappers.put(MatchOnlyTextFieldMapper.CONTENT_TYPE, MatchOnlyTextFieldMapper.PARSER); mappers.put(KeywordFieldMapper.CONTENT_TYPE, KeywordFieldMapper.PARSER); mappers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser()); mappers.put(ObjectMapper.NESTED_CONTENT_TYPE, new ObjectMapper.TypeParser()); diff --git 
a/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldAnalyzerModeTests.java b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldAnalyzerModeTests.java new file mode 100644 index 0000000000000..13cb279418fa8 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldAnalyzerModeTests.java @@ -0,0 +1,16 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.mapper; + +public class MatchOnlyTextFieldAnalyzerModeTests extends TextFieldAnalyzerModeTests { + @Override + ParametrizedFieldMapper.TypeParser getTypeParser() { + return MatchOnlyTextFieldMapper.PARSER; + } +} diff --git a/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java new file mode 100644 index 0000000000000..580f8cccc9af5 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldMapperTests.java @@ -0,0 +1,450 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.mapper; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.IndexableFieldType; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.MultiPhraseQuery; +import org.apache.lucene.search.PhraseQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.tests.analysis.MockSynonymAnalyzer; +import org.opensearch.common.lucene.search.MultiPhrasePrefixQuery; +import org.opensearch.core.common.Strings; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.index.query.MatchPhrasePrefixQueryBuilder; +import org.opensearch.index.query.MatchPhraseQueryBuilder; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.index.query.SourceFieldMatchQuery; +import org.opensearch.index.search.MatchQuery; +import org.junit.Before; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.core.Is.is; + +public class MatchOnlyTextFieldMapperTests extends TextFieldMapperTests { + + @Before + public void setupMatchOnlyTextFieldMapper() { + textFieldName = "match_only_text"; + } + + @Override + public void testDefaults() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); + assertEquals(fieldMapping(this::minimalMapping).toString(), mapper.mappingSource().toString()); + + ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + assertEquals("1234", fields[0].stringValue()); + IndexableFieldType fieldType = fields[0].fieldType(); + assertThat(fieldType.omitNorms(), equalTo(true)); + 
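// Added note: the remaining match_only_text defaults are asserted below: tokenized,
+ // not stored, docs-only index options, no term vectors, and no doc values.
+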
assertTrue(fieldType.tokenized()); + assertFalse(fieldType.stored()); + assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS)); + assertThat(fieldType.storeTermVectors(), equalTo(false)); + assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); + assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); + assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); + assertEquals(DocValuesType.NONE, fieldType.docValuesType()); + } + + @Override + public void testEnableStore() throws IOException { + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", textFieldName).field("store", true))); + ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); + IndexableField[] fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.length); + assertTrue(fields[0].fieldType().stored()); + } + + @Override + public void testIndexOptions() throws IOException { + Map<String, IndexOptions> supportedOptions = new HashMap<>(); + supportedOptions.put("docs", IndexOptions.DOCS); + + Map<String, IndexOptions> unsupportedOptions = new HashMap<>(); + unsupportedOptions.put("freqs", IndexOptions.DOCS_AND_FREQS); + unsupportedOptions.put("positions", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); + unsupportedOptions.put("offsets", IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS); + + for (String option : supportedOptions.keySet()) { + XContentBuilder mapping = MediaTypeRegistry.JSON.contentBuilder().startObject().startObject("_doc").startObject("properties"); + mapping.startObject(option).field("type", textFieldName).field("index_options", option).endObject(); + mapping.endObject().endObject().endObject(); + + DocumentMapper mapper = createDocumentMapper(mapping); + String serialized = Strings.toString(MediaTypeRegistry.JSON, mapper); + assertThat(serialized, containsString("\"docs\":{\"type\":\"match_only_text\"}")); + + ParsedDocument doc = mapper.parse(source(b -> { b.field(option, "1234"); })); + + IndexOptions options = supportedOptions.get(option); + IndexableField[] fields = doc.rootDoc().getFields(option); + assertEquals(1, fields.length); + assertEquals(options, fields[0].fieldType().indexOptions()); + } + + for (String option : unsupportedOptions.keySet()) { + XContentBuilder mapping = MediaTypeRegistry.JSON.contentBuilder().startObject().startObject("_doc").startObject("properties"); + mapping.startObject(option).field("type", textFieldName).field("index_options", option).endObject(); + mapping.endObject().endObject().endObject(); + MapperParsingException e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(mapping)); + assertThat( + e.getMessage(), + containsString( + "Failed to parse mapping [_doc]: Unknown value [" + option + "] for field [index_options] - accepted values are [docs]" + ) + ); + } + } + + @Override + public void testAnalyzedFieldPositionIncrementWithoutPositions() { + for (String indexOptions : List.of("docs")) { + try { + createDocumentMapper( + fieldMapping( + b -> b.field("type", textFieldName).field("index_options", indexOptions).field("position_increment_gap", 10) + ) + ); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public void testBWCSerialization() throws IOException {} + + @Override + public void testPositionIncrementGap() throws IOException {} + + @Override + public void testDefaultPositionIncrementGap() throws IOException {} + + @Override + public void testMinimalToMaximal() throws IOException {} + + @Override + public void 
testIndexPrefixMapping() throws IOException { + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> createDocumentMapper( + fieldMapping( + b -> b.field("type", textFieldName) + .field("analyzer", "standard") + .startObject("index_prefixes") + .field("min_chars", 2) + .field("max_chars", 10) + .endObject() + ) + ) + ); + assertEquals( + "Failed to parse mapping [_doc]: Index prefixes cannot be enabled on for match_only_text field. Use text field instead", + e.getMessage() + ); + } + + @Override + public void testIndexPrefixIndexTypes() throws IOException { + // not supported and asserted the expected behavior in testIndexPrefixMapping + } + + @Override + public void testFastPhrasePrefixes() throws IOException { + // not supported and asserted the expected behavior in testIndexPrefixMapping + } + + public void testPhrasePrefixes() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("field"); + { + b.field("type", textFieldName); + b.field("analyzer", "my_stop_analyzer"); // "standard" will be replaced with MockSynonymAnalyzer + } + b.endObject(); + b.startObject("synfield"); + { + b.field("type", textFieldName); + b.field("analyzer", "standard"); // "standard" will be replaced with MockSynonymAnalyzer + } + b.endObject(); + })); + QueryShardContext queryShardContext = createQueryShardContext(mapperService); + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "two words").toQuery(queryShardContext); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("field"); + mqb.add(new Term("field", "words")); + MultiPhrasePrefixQuery mqbFilter = new MultiPhrasePrefixQuery("field"); + mqbFilter.add(new Term("field", "two")); + mqbFilter.add(new Term("field", "words")); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "two")), BooleanClause.Occur.FILTER) + .add(mqb, BooleanClause.Occur.FILTER) + .build(), + mqbFilter, + mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q, equalTo(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "three words here").toQuery(queryShardContext); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("field"); + mqb.add(new Term("field", "here")); + MultiPhrasePrefixQuery mqbFilter = new MultiPhrasePrefixQuery("field"); + mqbFilter.add(new Term("field", "three")); + mqbFilter.add(new Term("field", "words")); + mqbFilter.add(new Term("field", "here")); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "three")), BooleanClause.Occur.FILTER) + .add(new TermQuery(new Term("field", "words")), BooleanClause.Occur.FILTER) + .add(mqb, BooleanClause.Occur.FILTER) + .build(), + mqbFilter, + mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q, equalTo(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "two words").slop(1).toQuery(queryShardContext); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("field"); + mqb.add(new Term("field", "words")); + MultiPhrasePrefixQuery mqbFilter = new MultiPhrasePrefixQuery("field"); + mqbFilter.setSlop(1); + mqbFilter.add(new Term("field", "two")); + mqbFilter.add(new Term("field", "words")); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "two")), BooleanClause.Occur.FILTER) + .add(mqb, BooleanClause.Occur.FILTER) + .build(), + mqbFilter, + 
mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q, equalTo(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "singleton").toQuery(queryShardContext); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("field"); + mqb.add(new Term("field", "singleton")); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(mqb, BooleanClause.Occur.FILTER).build(), + mqb, + mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q, is(expected)); + } + + { + Query q = new MatchPhrasePrefixQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("field"); + mqb.add(new Term("field", "stopword")); + MultiPhrasePrefixQuery mqbFilter = new MultiPhrasePrefixQuery("field"); + mqbFilter.add(new Term("field", "sparkle")); + mqbFilter.add(new Term[] { new Term("field", "stopword") }, 2); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "sparkle")), BooleanClause.Occur.FILTER) + .add(mqb, BooleanClause.Occur.FILTER) + .build(), + mqbFilter, + mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q, equalTo(expected)); + } + + { + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q = matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, "synfield", "motor dogs"); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("synfield"); + mqb.add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }); + MultiPhrasePrefixQuery mqbFilter = new MultiPhrasePrefixQuery("synfield"); + mqbFilter.add(new Term("synfield", "motor")); + mqbFilter.add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("synfield", "motor")), BooleanClause.Occur.FILTER) + .add(mqb, BooleanClause.Occur.FILTER) + .build(), + mqbFilter, + mapperService.fieldType("synfield"), + queryShardContext + ); + assertThat(q, equalTo(expected)); + } + + { + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setPhraseSlop(1); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q = matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, "synfield", "two dogs"); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("synfield"); + mqb.add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }); + MultiPhrasePrefixQuery mqbFilter = new MultiPhrasePrefixQuery("synfield"); + mqbFilter.add(new Term("synfield", "two")); + mqbFilter.add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }); + mqbFilter.setSlop(1); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("synfield", "two")), BooleanClause.Occur.FILTER) + .add(mqb, BooleanClause.Occur.FILTER) + .build(), + mqbFilter, + mapperService.fieldType("synfield"), + queryShardContext + ); + assertThat(q, equalTo(expected)); + } + + { + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q = matchQuery.parse(MatchQuery.Type.PHRASE_PREFIX, "synfield", "three dogs word"); + MultiPhrasePrefixQuery mqb = new MultiPhrasePrefixQuery("synfield"); + mqb.add(new Term("synfield", "word")); + MultiPhrasePrefixQuery mqbFilter = new MultiPhrasePrefixQuery("synfield"); + mqbFilter.add(new Term("synfield", 
"three")); + mqbFilter.add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }); + mqbFilter.add(new Term("synfield", "word")); + Query expected = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("synfield", "three")), BooleanClause.Occur.FILTER) + .add( + new BooleanQuery.Builder().add(new TermQuery(new Term("synfield", "dogs")), BooleanClause.Occur.SHOULD) + .add(new TermQuery(new Term("synfield", "dog")), BooleanClause.Occur.SHOULD) + .build(), + BooleanClause.Occur.FILTER + ) + .add(mqb, BooleanClause.Occur.FILTER) + .build(), + mqbFilter, + mapperService.fieldType("synfield"), + queryShardContext + ); + assertThat(q, equalTo(expected)); + } + } + + @Override + public void testFastPhraseMapping() throws IOException { + MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(mapping(b -> { + b.startObject("field") + .field("type", textFieldName) + .field("analyzer", "my_stop_analyzer") + .field("index_phrases", true) + .endObject(); + // "standard" will be replaced with MockSynonymAnalyzer + b.startObject("synfield").field("type", textFieldName).field("analyzer", "standard").field("index_phrases", true).endObject(); + }))); + assertEquals( + "Failed to parse mapping [_doc]: Index phrases cannot be enabled on for match_only_text field. Use text field instead", + e.getMessage() + ); + } + + @Override + public void testSimpleMerge() throws IOException {} + + public void testPhraseQuery() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("field").field("type", textFieldName).field("analyzer", "my_stop_analyzer").endObject(); + // "standard" will be replaced with MockSynonymAnalyzer + b.startObject("synfield").field("type", textFieldName).field("analyzer", "standard").endObject(); + })); + QueryShardContext queryShardContext = createQueryShardContext(mapperService); + + Query q = new MatchPhraseQueryBuilder("field", "two words").toQuery(queryShardContext); + Query expectedQuery = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "two")), BooleanClause.Occur.FILTER) + .add(new TermQuery(new Term("field", "words")), BooleanClause.Occur.FILTER) + .build(), + new PhraseQuery("field", "two", "words"), + mapperService.fieldType("field"), + queryShardContext + ); + + assertThat(q, is(expectedQuery)); + Query q4 = new MatchPhraseQueryBuilder("field", "singleton").toQuery(queryShardContext); + assertThat(q4, is(new TermQuery(new Term("field", "singleton")))); + + Query q2 = new MatchPhraseQueryBuilder("field", "three words here").toQuery(queryShardContext); + expectedQuery = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "three")), BooleanClause.Occur.FILTER) + .add(new TermQuery(new Term("field", "words")), BooleanClause.Occur.FILTER) + .add(new TermQuery(new Term("field", "here")), BooleanClause.Occur.FILTER) + .build(), + new PhraseQuery("field", "three", "words", "here"), + mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q2, is(expectedQuery)); + + Query q3 = new MatchPhraseQueryBuilder("field", "two words").slop(2).toQuery(queryShardContext); + expectedQuery = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "two")), BooleanClause.Occur.FILTER) + .add(new TermQuery(new Term("field", "words")), BooleanClause.Occur.FILTER) + .build(), + new PhraseQuery(2, "field", "two", "words"), + 
mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q3, is(expectedQuery)); + + Query q5 = new MatchPhraseQueryBuilder("field", "sparkle a stopword").toQuery(queryShardContext); + expectedQuery = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("field", "sparkle")), BooleanClause.Occur.FILTER) + .add(new TermQuery(new Term("field", "stopword")), BooleanClause.Occur.FILTER) + .build(), + new PhraseQuery.Builder().add(new Term("field", "sparkle")).add(new Term("field", "stopword"), 2).build(), + mapperService.fieldType("field"), + queryShardContext + ); + assertThat(q5, is(expectedQuery)); + + MatchQuery matchQuery = new MatchQuery(queryShardContext); + matchQuery.setAnalyzer(new MockSynonymAnalyzer()); + Query q6 = matchQuery.parse(MatchQuery.Type.PHRASE, "synfield", "motor dogs"); + expectedQuery = new SourceFieldMatchQuery( + new BooleanQuery.Builder().add(new TermQuery(new Term("synfield", "motor")), BooleanClause.Occur.FILTER) + .add( + new BooleanQuery.Builder().add(new TermQuery(new Term("synfield", "dogs")), BooleanClause.Occur.SHOULD) + .add(new TermQuery(new Term("synfield", "dog")), BooleanClause.Occur.SHOULD) + .build(), + BooleanClause.Occur.FILTER + ) + .build(), + new MultiPhraseQuery.Builder().add(new Term("synfield", "motor")) + .add(new Term[] { new Term("synfield", "dogs"), new Term("synfield", "dog") }, 1) + .build(), + mapperService.fieldType("synfield"), + queryShardContext + ); + assertThat(q6, is(expectedQuery)); + } +} diff --git a/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java new file mode 100644 index 0000000000000..51234fa04ddc2 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/mapper/MatchOnlyTextFieldTypeTests.java @@ -0,0 +1,31 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.mapper; + +import org.opensearch.common.lucene.Lucene; + +public class MatchOnlyTextFieldTypeTests extends TextFieldTypeTests { + + @Override + TextFieldMapper.TextFieldType createFieldType(boolean searchable) { + TextSearchInfo tsi = new TextSearchInfo( + TextFieldMapper.Defaults.FIELD_TYPE, + null, + Lucene.STANDARD_ANALYZER, + Lucene.STANDARD_ANALYZER + ); + return new MatchOnlyTextFieldMapper.MatchOnlyTextFieldType( + "field", + searchable, + false, + tsi, + ParametrizedFieldMapper.Parameter.metaParam().get() + ); + } +} diff --git a/server/src/test/java/org/opensearch/index/mapper/TextFieldAnalyzerModeTests.java b/server/src/test/java/org/opensearch/index/mapper/TextFieldAnalyzerModeTests.java index 93bed729f0974..83a3bdc580ae6 100644 --- a/server/src/test/java/org/opensearch/index/mapper/TextFieldAnalyzerModeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TextFieldAnalyzerModeTests.java @@ -59,6 +59,9 @@ import static org.mockito.Mockito.when; public class TextFieldAnalyzerModeTests extends OpenSearchTestCase { + ParametrizedFieldMapper.TypeParser getTypeParser() { + return TextFieldMapper.PARSER; + } private static Map<String, NamedAnalyzer> defaultAnalyzers() { Map<String, NamedAnalyzer> analyzers = new HashMap<>(); @@ -101,7 +104,7 @@ public void testParseTextFieldCheckAnalyzerAnalysisMode() { IndexAnalyzers indexAnalyzers = new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); - TextFieldMapper.PARSER.parse("field", fieldNode, parserContext); + getTypeParser().parse("field", fieldNode, parserContext); // check that "analyzer" set to something that only supports AnalysisMode.SEARCH_TIME or AnalysisMode.INDEX_TIME is blocked AnalysisMode mode = randomFrom(AnalysisMode.SEARCH_TIME, AnalysisMode.INDEX_TIME); @@ -110,7 +113,7 @@ public void testParseTextFieldCheckAnalyzerAnalysisMode() { indexAnalyzers = new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); fieldNode.put("analyzer", "my_analyzer"); - MapperException ex = expectThrows(MapperException.class, () -> { TextFieldMapper.PARSER.parse("name", fieldNode, parserContext); }); + MapperException ex = expectThrows(MapperException.class, () -> { getTypeParser().parse("name", fieldNode, parserContext); }); assertThat( ex.getMessage(), containsString("analyzer [my_named_analyzer] contains filters [my_analyzer] that are not allowed to run") @@ -136,7 +139,7 @@ public void testParseTextFieldCheckSearchAnalyzerAnalysisMode() { IndexAnalyzers indexAnalyzers = new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); - TextFieldMapper.PARSER.parse("textField", fieldNode, parserContext); + getTypeParser().parse("textField", fieldNode, parserContext); // check that "analyzer" set to AnalysisMode.INDEX_TIME is blocked mode = AnalysisMode.INDEX_TIME; @@ -151,10 +154,7 @@ public void testParseTextFieldCheckSearchAnalyzerAnalysisMode() { if (settingToTest.equals("search_quote_analyzer")) { fieldNode.put("search_analyzer", "standard"); } - MapperException ex = expectThrows( - MapperException.class, - () -> { TextFieldMapper.PARSER.parse("field", fieldNode, parserContext); } - ); + MapperException ex = expectThrows(MapperException.class, () -> { getTypeParser().parse("field", fieldNode, parserContext); }); assertEquals( 
"analyzer [my_named_analyzer] contains filters [my_analyzer] that are not allowed to run in search time mode.", ex.getMessage() @@ -174,10 +174,7 @@ public void testParseTextFieldCheckAnalyzerWithSearchAnalyzerAnalysisMode() { analyzers.put("my_analyzer", new NamedAnalyzer("my_named_analyzer", AnalyzerScope.INDEX, createAnalyzerWithMode(mode))); IndexAnalyzers indexAnalyzers = new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); - MapperException ex = expectThrows( - MapperException.class, - () -> { TextFieldMapper.PARSER.parse("field", fieldNode, parserContext); } - ); + MapperException ex = expectThrows(MapperException.class, () -> { getTypeParser().parse("field", fieldNode, parserContext); }); assertThat( ex.getMessage(), containsString("analyzer [my_named_analyzer] contains filters [my_analyzer] that are not allowed to run") @@ -193,7 +190,6 @@ public void testParseTextFieldCheckAnalyzerWithSearchAnalyzerAnalysisMode() { indexAnalyzers = new IndexAnalyzers(analyzers, Collections.emptyMap(), Collections.emptyMap()); when(parserContext.getIndexAnalyzers()).thenReturn(indexAnalyzers); - TextFieldMapper.PARSER.parse("field", fieldNode, parserContext); + getTypeParser().parse("field", fieldNode, parserContext); } - } diff --git a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java index a9b902e121bda..a22bfa5e845b1 100644 --- a/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TextFieldMapperTests.java @@ -81,6 +81,7 @@ import org.opensearch.index.query.MatchPhraseQueryBuilder; import org.opensearch.index.query.QueryShardContext; import org.opensearch.index.search.MatchQuery; +import org.junit.Before; import java.io.IOException; import java.util.Arrays; @@ -95,6 +96,13 @@ public class TextFieldMapperTests extends MapperTestCase { + public String textFieldName = "text"; + + @Before + public void setup() { + textFieldName = "text"; + } + @Override protected void writeFieldValue(XContentBuilder builder) throws IOException { builder.value(1234); @@ -169,30 +177,34 @@ protected void registerParameters(ParameterChecker checker) throws IOException { checker.registerConflictCheck("index", b -> b.field("index", false)); checker.registerConflictCheck("store", b -> b.field("store", true)); - checker.registerConflictCheck("index_phrases", b -> b.field("index_phrases", true)); - checker.registerConflictCheck("index_prefixes", b -> b.startObject("index_prefixes").endObject()); - checker.registerConflictCheck("index_options", b -> b.field("index_options", "docs")); + if (!textFieldName.equals("match_only_text")) { + checker.registerConflictCheck("index_phrases", b -> b.field("index_phrases", true)); + checker.registerConflictCheck("index_prefixes", b -> b.startObject("index_prefixes").endObject()); + checker.registerConflictCheck("index_options", b -> b.field("index_options", "docs")); + } checker.registerConflictCheck("similarity", b -> b.field("similarity", "boolean")); checker.registerConflictCheck("analyzer", b -> b.field("analyzer", "keyword")); checker.registerConflictCheck("term_vector", b -> b.field("term_vector", "yes")); checker.registerConflictCheck("position_increment_gap", b -> b.field("position_increment_gap", 10)); - // norms can be set from true to false, but not vice versa - checker.registerConflictCheck("norms", 
fieldMapping(b -> { - b.field("type", "text"); - b.field("norms", false); - }), fieldMapping(b -> { - b.field("type", "text"); - b.field("norms", true); - })); - checker.registerUpdateCheck(b -> { - b.field("type", "text"); - b.field("norms", true); - }, b -> { - b.field("type", "text"); - b.field("norms", false); - }, m -> assertFalse(m.fieldType().getTextSearchInfo().hasNorms())); + if (!textFieldName.equals(MatchOnlyTextFieldMapper.CONTENT_TYPE)) { + // norms can be set from true to false, but not vice versa + checker.registerConflictCheck("norms", fieldMapping(b -> { + b.field("type", textFieldName); + b.field("norms", false); + }), fieldMapping(b -> { + b.field("type", textFieldName); + b.field("norms", true); + })); + checker.registerUpdateCheck(b -> { + b.field("type", textFieldName); + b.field("norms", true); + }, b -> { + b.field("type", textFieldName); + b.field("norms", false); + }, m -> assertFalse(m.fieldType().getTextSearchInfo().hasNorms())); + } checker.registerUpdateCheck(b -> b.field("boost", 2.0), m -> assertEquals(m.fieldType().boost(), 2.0, 0)); @@ -237,7 +249,7 @@ public TokenStream create(TokenStream tokenStream) { @Override protected void minimalMapping(XContentBuilder b) throws IOException { - b.field("type", "text"); + b.field("type", textFieldName); } public void testDefaults() throws IOException { @@ -262,7 +274,7 @@ public void testDefaults() throws IOException { public void testBWCSerialization() throws IOException { MapperService mapperService = createMapperService(fieldMapping(b -> { - b.field("type", "text"); + b.field("type", textFieldName); b.field("fielddata", true); b.startObject("fields"); { @@ -312,7 +324,7 @@ public void testBWCSerialization() throws IOException { } public void testEnableStore() throws IOException { - DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text").field("store", true))); + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", textFieldName).field("store", true))); ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -320,14 +332,14 @@ public void testEnableStore() throws IOException { } public void testDisableIndex() throws IOException { - DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text").field("index", false))); + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", textFieldName).field("index", false))); ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(0, fields.length); } public void testDisableNorms() throws IOException { - DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text").field("norms", false))); + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", textFieldName).field("norms", false))); ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); IndexableField[] fields = doc.rootDoc().getFields("field"); assertEquals(1, fields.length); @@ -343,7 +355,7 @@ public void testIndexOptions() throws IOException { XContentBuilder mapping = MediaTypeRegistry.JSON.contentBuilder().startObject().startObject("_doc").startObject("properties"); for (String option : supportedOptions.keySet()) { - mapping.startObject(option).field("type", "text").field("index_options", option).endObject(); + 
mapping.startObject(option).field("type", textFieldName).field("index_options", option).endObject(); } mapping.endObject().endObject().endObject(); @@ -389,7 +401,7 @@ public void testDefaultPositionIncrementGap() throws IOException { public void testPositionIncrementGap() throws IOException { final int positionIncrementGap = randomIntBetween(1, 1000); MapperService mapperService = createMapperService( - fieldMapping(b -> b.field("type", "text").field("position_increment_gap", positionIncrementGap)) + fieldMapping(b -> b.field("type", textFieldName).field("position_increment_gap", positionIncrementGap)) ); ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.array("field", new String[] { "a", "b" }))); @@ -409,16 +421,16 @@ public void testPositionIncrementGap() throws IOException { public void testSearchAnalyzerSerialization() throws IOException { XContentBuilder mapping = fieldMapping( - b -> b.field("type", "text").field("analyzer", "standard").field("search_analyzer", "keyword") + b -> b.field("type", textFieldName).field("analyzer", "standard").field("search_analyzer", "keyword") ); assertEquals(mapping.toString(), createDocumentMapper(mapping).mappingSource().toString()); // special case: default index analyzer - mapping = fieldMapping(b -> b.field("type", "text").field("analyzer", "default").field("search_analyzer", "keyword")); + mapping = fieldMapping(b -> b.field("type", textFieldName).field("analyzer", "default").field("search_analyzer", "keyword")); assertEquals(mapping.toString(), createDocumentMapper(mapping).mappingSource().toString()); // special case: default search analyzer - mapping = fieldMapping(b -> b.field("type", "text").field("analyzer", "keyword").field("search_analyzer", "default")); + mapping = fieldMapping(b -> b.field("type", textFieldName).field("analyzer", "keyword").field("search_analyzer", "default")); assertEquals(mapping.toString(), createDocumentMapper(mapping).mappingSource().toString()); XContentBuilder builder = MediaTypeRegistry.JSON.contentBuilder(); @@ -436,7 +448,7 @@ public void testSearchAnalyzerSerialization() throws IOException { public void testSearchQuoteAnalyzerSerialization() throws IOException { XContentBuilder mapping = fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "standard") .field("search_analyzer", "standard") .field("search_quote_analyzer", "keyword") @@ -445,7 +457,7 @@ public void testSearchQuoteAnalyzerSerialization() throws IOException { // special case: default index/search analyzer mapping = fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "default") .field("search_analyzer", "default") .field("search_quote_analyzer", "keyword") @@ -456,27 +468,27 @@ public void testSearchQuoteAnalyzerSerialization() throws IOException { public void testTermVectors() throws IOException { XContentBuilder mapping = mapping( b -> b.startObject("field1") - .field("type", "text") + .field("type", textFieldName) .field("term_vector", "no") .endObject() .startObject("field2") - .field("type", "text") + .field("type", textFieldName) .field("term_vector", "yes") .endObject() .startObject("field3") - .field("type", "text") + .field("type", textFieldName) .field("term_vector", "with_offsets") .endObject() .startObject("field4") - .field("type", "text") + .field("type", textFieldName) .field("term_vector", "with_positions") .endObject() .startObject("field5") - .field("type", "text") + .field("type", textFieldName) 
.field("term_vector", "with_positions_offsets") .endObject() .startObject("field6") - .field("type", "text") + .field("type", textFieldName) .field("term_vector", "with_positions_offsets_payloads") .endObject() ); @@ -526,7 +538,9 @@ public void testTermVectors() throws IOException { } public void testEagerGlobalOrdinals() throws IOException { - DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text").field("eager_global_ordinals", true))); + DocumentMapper mapper = createDocumentMapper( + fieldMapping(b -> b.field("type", textFieldName).field("eager_global_ordinals", true)) + ); FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); assertTrue(fieldMapper.fieldType().eagerGlobalOrdinals()); @@ -539,13 +553,13 @@ public void testFielddata() throws IOException { })); assertThat(e.getMessage(), containsString("Text fields are not optimised for operations that require per-document field data")); - MapperService enabledMapper = createMapperService(fieldMapping(b -> b.field("type", "text").field("fielddata", true))); + MapperService enabledMapper = createMapperService(fieldMapping(b -> b.field("type", textFieldName).field("fielddata", true))); enabledMapper.fieldType("field").fielddataBuilder("test", () -> { throw new UnsupportedOperationException(); }); // no exception // this time e = expectThrows( MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "text").field("index", false).field("fielddata", true))) + () -> createMapperService(fieldMapping(b -> b.field("type", textFieldName).field("index", false).field("fielddata", true))) ); assertThat(e.getMessage(), containsString("Cannot enable fielddata on a [text] field that is not indexed")); } @@ -553,7 +567,7 @@ public void testFielddata() throws IOException { public void testFrequencyFilter() throws IOException { MapperService mapperService = createMapperService( fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("fielddata", true) .startObject("fielddata_frequency_filter") .field("min", 2d) @@ -571,17 +585,22 @@ public void testFrequencyFilter() throws IOException { public void testNullConfigValuesFail() throws MapperParsingException { Exception e = expectThrows( MapperParsingException.class, - () -> createDocumentMapper(fieldMapping(b -> b.field("type", "text").field("analyzer", (String) null))) + () -> createDocumentMapper(fieldMapping(b -> b.field("type", textFieldName).field("analyzer", (String) null))) + ); + assertThat( + e.getMessage(), + containsString("[analyzer] on mapper [field] of type [" + textFieldName + "] must not have a [null] value") ); - assertThat(e.getMessage(), containsString("[analyzer] on mapper [field] of type [text] must not have a [null] value")); } public void testNotIndexedFieldPositionIncrement() { Exception e = expectThrows( MapperParsingException.class, - () -> createDocumentMapper(fieldMapping(b -> b.field("type", "text").field("index", false).field("position_increment_gap", 10))) + () -> createDocumentMapper( + fieldMapping(b -> b.field("type", textFieldName).field("index", false).field("position_increment_gap", 10)) + ) ); - assertThat(e.getMessage(), containsString("Cannot set position_increment_gap on field [field] without positions enabled")); + assertThat(e.getMessage(), containsString("Cannot set position_increment_gap on field [field]")); } public void testAnalyzedFieldPositionIncrementWithoutPositions() { @@ -589,7 +608,9 @@ public void 
testAnalyzedFieldPositionIncrementWithoutPositions() { Exception e = expectThrows( MapperParsingException.class, () -> createDocumentMapper( - fieldMapping(b -> b.field("type", "text").field("index_options", indexOptions).field("position_increment_gap", 10)) + fieldMapping( + b -> b.field("type", textFieldName).field("index_options", indexOptions).field("position_increment_gap", 10) + ) ) ); assertThat(e.getMessage(), containsString("Cannot set position_increment_gap on field [field] without positions enabled")); @@ -600,7 +621,7 @@ public void testIndexPrefixIndexTypes() throws IOException { { DocumentMapper mapper = createDocumentMapper( fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "standard") .startObject("index_prefixes") .endObject() @@ -615,7 +636,7 @@ public void testIndexPrefixIndexTypes() throws IOException { { DocumentMapper mapper = createDocumentMapper( fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "standard") .startObject("index_prefixes") .endObject() @@ -632,7 +653,7 @@ public void testIndexPrefixIndexTypes() throws IOException { { DocumentMapper mapper = createDocumentMapper( fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "standard") .startObject("index_prefixes") .endObject() @@ -649,7 +670,7 @@ public void testIndexPrefixIndexTypes() throws IOException { { DocumentMapper mapper = createDocumentMapper( fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "standard") .startObject("index_prefixes") .endObject() @@ -666,7 +687,7 @@ public void testIndexPrefixIndexTypes() throws IOException { { DocumentMapper mapper = createDocumentMapper( fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "standard") .startObject("index_prefixes") .endObject() @@ -682,62 +703,18 @@ public void testIndexPrefixIndexTypes() throws IOException { } public void testNestedIndexPrefixes() throws IOException { - { - MapperService mapperService = createMapperService( - mapping( - b -> b.startObject("object") - .field("type", "object") - .startObject("properties") - .startObject("field") - .field("type", "text") - .startObject("index_prefixes") - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - MappedFieldType textField = mapperService.fieldType("object.field"); - assertNotNull(textField); - assertThat(textField, instanceOf(TextFieldType.class)); - MappedFieldType prefix = ((TextFieldType) textField).getPrefixFieldType(); - assertEquals(prefix.name(), "object.field._index_prefix"); - FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("object.field._index_prefix"); - assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, mapper.fieldType.indexOptions()); - assertFalse(mapper.fieldType.storeTermVectorOffsets()); - } - { - MapperService mapperService = createMapperService( - mapping( - b -> b.startObject("body") - .field("type", "text") - .startObject("fields") - .startObject("with_prefix") - .field("type", "text") - .startObject("index_prefixes") - .endObject() - .endObject() - .endObject() - .endObject() - ) - ); - MappedFieldType textField = mapperService.fieldType("body.with_prefix"); - assertNotNull(textField); - assertThat(textField, instanceOf(TextFieldType.class)); - MappedFieldType prefix = ((TextFieldType) textField).getPrefixFieldType(); - assertEquals(prefix.name(), 
"body.with_prefix._index_prefix"); - FieldMapper mapper = (FieldMapper) mapperService.documentMapper().mappers().getMapper("body.with_prefix._index_prefix"); - assertEquals(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS, mapper.fieldType.indexOptions()); - assertFalse(mapper.fieldType.storeTermVectorOffsets()); - } } public void testFastPhraseMapping() throws IOException { MapperService mapperService = createMapperService(mapping(b -> { - b.startObject("field").field("type", "text").field("analyzer", "my_stop_analyzer").field("index_phrases", true).endObject(); + b.startObject("field") + .field("type", textFieldName) + .field("analyzer", "my_stop_analyzer") + .field("index_phrases", true) + .endObject(); // "standard" will be replaced with MockSynonymAnalyzer - b.startObject("synfield").field("type", "text").field("analyzer", "standard").field("index_phrases", true).endObject(); + b.startObject("synfield").field("type", textFieldName).field("analyzer", "standard").field("index_phrases", true).endObject(); })); QueryShardContext queryShardContext = createQueryShardContext(mapperService); @@ -808,14 +785,16 @@ protected TokenStreamComponents createComponents(String fieldName) { Exception e = expectThrows( MapperParsingException.class, - () -> createMapperService(fieldMapping(b -> b.field("type", "text").field("index", "false").field("index_phrases", true))) + () -> createMapperService( + fieldMapping(b -> b.field("type", textFieldName).field("index", "false").field("index_phrases", true)) + ) ); assertThat(e.getMessage(), containsString("Cannot set index_phrases on unindexed field [field]")); e = expectThrows( MapperParsingException.class, () -> createMapperService( - fieldMapping(b -> b.field("type", "text").field("index_options", "freqs").field("index_phrases", true)) + fieldMapping(b -> b.field("type", textFieldName).field("index_options", "freqs").field("index_phrases", true)) ) ); assertThat(e.getMessage(), containsString("Cannot set index_phrases on field [field] if positions are not enabled")); @@ -826,7 +805,7 @@ public void testIndexPrefixMapping() throws IOException { { DocumentMapper mapper = createDocumentMapper( fieldMapping( - b -> b.field("type", "text") + b -> b.field("type", textFieldName) .field("analyzer", "standard") .startObject("index_prefixes") .field("min_chars", 2) @@ -844,29 +823,29 @@ public void testIndexPrefixMapping() throws IOException { { DocumentMapper mapper = createDocumentMapper( - fieldMapping(b -> b.field("type", "text").field("analyzer", "standard").startObject("index_prefixes").endObject()) + fieldMapping(b -> b.field("type", textFieldName).field("analyzer", "standard").startObject("index_prefixes").endObject()) ); assertThat(mapper.mappers().getMapper("field._index_prefix").toString(), containsString("prefixChars=2:5")); } { - DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "text").nullField("index_prefixes"))); + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", textFieldName).nullField("index_prefixes"))); assertNull(mapper.mappers().getMapper("field._index_prefix")); } { MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { - b.field("type", "text").field("analyzer", "standard"); + b.field("type", textFieldName).field("analyzer", "standard"); b.startObject("index_prefixes").field("min_chars", 1).field("max_chars", 10).endObject(); - b.startObject("fields").startObject("_index_prefix").field("type", "text").endObject().endObject(); + 
b.startObject("fields").startObject("_index_prefix").field("type", textFieldName).endObject().endObject(); }))); assertThat(e.getMessage(), containsString("Field [field._index_prefix] is defined more than once")); } { MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { - b.field("type", "text").field("analyzer", "standard"); + b.field("type", textFieldName).field("analyzer", "standard"); b.startObject("index_prefixes").field("min_chars", 11).field("max_chars", 10).endObject(); }))); assertThat(e.getMessage(), containsString("min_chars [11] must be less than max_chars [10]")); @@ -874,7 +853,7 @@ public void testIndexPrefixMapping() throws IOException { { MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { - b.field("type", "text").field("analyzer", "standard"); + b.field("type", textFieldName).field("analyzer", "standard"); b.startObject("index_prefixes").field("min_chars", 0).field("max_chars", 10).endObject(); }))); assertThat(e.getMessage(), containsString("min_chars [0] must be greater than zero")); @@ -882,7 +861,7 @@ public void testIndexPrefixMapping() throws IOException { { MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { - b.field("type", "text").field("analyzer", "standard"); + b.field("type", textFieldName).field("analyzer", "standard"); b.startObject("index_prefixes").field("min_chars", 1).field("max_chars", 25).endObject(); }))); assertThat(e.getMessage(), containsString("max_chars [25] must be less than 20")); @@ -890,7 +869,7 @@ public void testIndexPrefixMapping() throws IOException { { MapperParsingException e = expectThrows(MapperParsingException.class, () -> createMapperService(fieldMapping(b -> { - b.field("type", "text").field("analyzer", "standard").field("index", false); + b.field("type", textFieldName).field("analyzer", "standard").field("index", false); b.startObject("index_prefixes").endObject(); }))); assertThat(e.getMessage(), containsString("Cannot set index_prefixes on unindexed field [field]")); @@ -901,14 +880,14 @@ public void testFastPhrasePrefixes() throws IOException { MapperService mapperService = createMapperService(mapping(b -> { b.startObject("field"); { - b.field("type", "text"); + b.field("type", textFieldName); b.field("analyzer", "my_stop_analyzer"); b.startObject("index_prefixes").field("min_chars", 2).field("max_chars", 10).endObject(); } b.endObject(); b.startObject("synfield"); { - b.field("type", "text"); + b.field("type", textFieldName); b.field("analyzer", "standard"); // "standard" will be replaced with MockSynonymAnalyzer b.field("index_phrases", true); b.startObject("index_prefixes").field("min_chars", 2).field("max_chars", 10).endObject(); @@ -999,7 +978,7 @@ public void testFastPhrasePrefixes() throws IOException { public void testSimpleMerge() throws IOException { XContentBuilder startingMapping = fieldMapping( - b -> b.field("type", "text").startObject("index_prefixes").endObject().field("index_phrases", true) + b -> b.field("type", textFieldName).startObject("index_prefixes").endObject().field("index_phrases", true) ); MapperService mapperService = createMapperService(startingMapping); assertThat(mapperService.documentMapper().mappers().getMapper("field"), instanceOf(TextFieldMapper.class)); @@ -1008,19 +987,28 @@ public void testSimpleMerge() throws IOException { assertThat(mapperService.documentMapper().mappers().getMapper("field"), 
 instanceOf(TextFieldMapper.class));

         XContentBuilder differentPrefix = fieldMapping(
-            b -> b.field("type", "text").startObject("index_prefixes").field("min_chars", "3").endObject().field("index_phrases", true)
+            b -> b.field("type", textFieldName)
+                .startObject("index_prefixes")
+                .field("min_chars", "3")
+                .endObject()
+                .field("index_phrases", true)
         );
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, differentPrefix));
         assertThat(e.getMessage(), containsString("Cannot update parameter [index_prefixes]"));

         XContentBuilder differentPhrases = fieldMapping(
-            b -> b.field("type", "text").startObject("index_prefixes").endObject().field("index_phrases", false)
+            b -> b.field("type", textFieldName).startObject("index_prefixes").endObject().field("index_phrases", false)
         );
         e = expectThrows(IllegalArgumentException.class, () -> merge(mapperService, differentPhrases));
         assertThat(e.getMessage(), containsString("Cannot update parameter [index_phrases]"));

         XContentBuilder newField = mapping(b -> {
-            b.startObject("field").field("type", "text").startObject("index_prefixes").endObject().field("index_phrases", true).endObject();
+            b.startObject("field")
+                .field("type", textFieldName)
+                .startObject("index_prefixes")
+                .endObject()
+                .field("index_phrases", true)
+                .endObject();
             b.startObject("other_field").field("type", "keyword").endObject();
         });
         merge(mapperService, newField);

diff --git a/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java
index 0592a972db5e9..9c177bbec61fd 100644
--- a/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java
@@ -66,35 +66,39 @@ public class TextFieldTypeTests extends FieldTypeTestCase {

-    private static TextFieldType createFieldType() {
-        return new TextFieldType("field");
+    TextFieldType createFieldType(boolean searchable) {
+        if (searchable) {
+            return new TextFieldType("field");
+        } else {
+            return new TextFieldType("field", false, false, Collections.emptyMap());
+        }
     }

     public void testIsAggregatableDependsOnFieldData() {
-        TextFieldType ft = createFieldType();
+        TextFieldType ft = createFieldType(true);
         assertFalse(ft.isAggregatable());
         ft.setFielddata(true);
         assertTrue(ft.isAggregatable());
     }

     public void testTermQuery() {
-        MappedFieldType ft = createFieldType();
+        MappedFieldType ft = createFieldType(true);
         assertEquals(new TermQuery(new Term("field", "foo")), ft.termQuery("foo", null));
         assertEquals(AutomatonQueries.caseInsensitiveTermQuery(new Term("field", "fOo")), ft.termQueryCaseInsensitive("fOo", null));

-        MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap());
+        MappedFieldType unsearchable = createFieldType(false);
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("bar", null));
         assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
     }

     public void testTermsQuery() {
-        MappedFieldType ft = createFieldType();
+        MappedFieldType ft = createFieldType(true);
         List<BytesRef> terms = new ArrayList<>();
         terms.add(new BytesRef("foo"));
         terms.add(new BytesRef("bar"));
         assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), null));

-        MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap());
+        MappedFieldType unsearchable = 
createFieldType(false); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> unsearchable.termsQuery(Arrays.asList("foo", "bar"), null) @@ -103,7 +107,7 @@ public void testTermsQuery() { } public void testRangeQuery() { - MappedFieldType ft = createFieldType(); + MappedFieldType ft = createFieldType(true); assertEquals( new TermRangeQuery("field", BytesRefs.toBytesRef("foo"), BytesRefs.toBytesRef("bar"), true, false), ft.rangeQuery("foo", "bar", true, false, null, null, null, MOCK_QSC) @@ -120,13 +124,13 @@ public void testRangeQuery() { } public void testRegexpQuery() { - MappedFieldType ft = createFieldType(); + MappedFieldType ft = createFieldType(true); assertEquals( new RegexpQuery(new Term("field", "foo.*")), ft.regexpQuery("foo.*", 0, 0, 10, CONSTANT_SCORE_BLENDED_REWRITE, MOCK_QSC) ); - MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = createFieldType(false); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> unsearchable.regexpQuery("foo.*", 0, 0, 10, null, MOCK_QSC) @@ -141,13 +145,13 @@ public void testRegexpQuery() { } public void testFuzzyQuery() { - MappedFieldType ft = createFieldType(); + MappedFieldType ft = createFieldType(true); assertEquals( new FuzzyQuery(new Term("field", "foo"), 2, 1, 50, true), ft.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_QSC) ); - MappedFieldType unsearchable = new TextFieldType("field", false, false, Collections.emptyMap()); + MappedFieldType unsearchable = createFieldType(false); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> unsearchable.fuzzyQuery("foo", Fuzziness.fromEdits(2), 1, 50, true, MOCK_QSC) @@ -162,7 +166,7 @@ public void testFuzzyQuery() { } public void testIndexPrefixes() { - TextFieldType ft = createFieldType(); + TextFieldType ft = createFieldType(true); ft.setPrefixFieldType(new TextFieldMapper.PrefixFieldType(ft, "field._index_prefix", 2, 10)); Query q = ft.prefixQuery("goin", CONSTANT_SCORE_REWRITE, false, randomMockShardContext()); @@ -222,7 +226,7 @@ public void testIndexPrefixes() { } public void testFetchSourceValue() throws IOException { - TextFieldType fieldType = createFieldType(); + TextFieldType fieldType = createFieldType(true); fieldType.setIndexAnalyzer(Lucene.STANDARD_ANALYZER); assertEquals(List.of("value"), fetchSourceValue(fieldType, "value")); diff --git a/server/src/test/java/org/opensearch/index/query/SourceFieldMatchQueryTests.java b/server/src/test/java/org/opensearch/index/query/SourceFieldMatchQueryTests.java new file mode 100644 index 0000000000000..6af717a97b328 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/query/SourceFieldMatchQueryTests.java @@ -0,0 +1,173 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.query; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.store.Directory; +import org.opensearch.core.index.Index; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.index.mapper.MapperServiceTestCase; +import org.opensearch.index.mapper.ParsedDocument; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import static org.mockito.Mockito.when; + +public class SourceFieldMatchQueryTests extends MapperServiceTestCase { + + public void testAllPossibleScenarios() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("dessert"); + { + b.field("type", "match_only_text"); + } + b.endObject(); + })); + + QueryShardContext queryShardContext = createQueryShardContext(mapperService); + when(queryShardContext.sourcePath("dessert")).thenReturn(Set.of("dessert")); + when(queryShardContext.index()).thenReturn(new Index("test_index", "uuid")); + + String[] desserts = new String[] { "apple pie pie", "banana split pie", "chocolate cake" }; + List<ParsedDocument> docs = new ArrayList<>(); + for (String dessert : desserts) { + docs.add(mapperService.documentMapper().parse(source(b -> b.field("dessert", dessert)))); + } + SourceFieldMatchQuery matchBoth = new SourceFieldMatchQuery( + QueryBuilders.matchQuery("dessert", "apple").doToQuery(queryShardContext), // Delegate query + QueryBuilders.matchQuery("dessert", "pie").doToQuery(queryShardContext), // Filter query + queryShardContext.getFieldType("dessert"), + queryShardContext + ); + + SourceFieldMatchQuery matchDelegate = new SourceFieldMatchQuery( + QueryBuilders.matchQuery("dessert", "apple").doToQuery(queryShardContext), // Delegate query + QueryBuilders.matchQuery("dessert", "juice").doToQuery(queryShardContext), // Filter query + queryShardContext.getFieldType("dessert"), + queryShardContext + ); + + SourceFieldMatchQuery matchFilter = new SourceFieldMatchQuery( + QueryBuilders.matchQuery("dessert", "tart").doToQuery(queryShardContext), // Delegate query + QueryBuilders.matchQuery("dessert", "pie").doToQuery(queryShardContext), // Filter query + queryShardContext.getFieldType("dessert"), + queryShardContext + ); + + SourceFieldMatchQuery matchNone = new SourceFieldMatchQuery( + QueryBuilders.matchQuery("dessert", "gulab").doToQuery(queryShardContext), // Delegate query + QueryBuilders.matchQuery("dessert", "jamun").doToQuery(queryShardContext), // Filter query + queryShardContext.getFieldType("dessert"), + queryShardContext + ); + + SourceFieldMatchQuery matchMultipleDocs = new SourceFieldMatchQuery( + QueryBuilders.matchAllQuery().toQuery(queryShardContext), // Delegate query + QueryBuilders.matchQuery("dessert", "pie").doToQuery(queryShardContext), // Filter query + queryShardContext.getFieldType("dessert"), + queryShardContext + ); + try (Directory dir = newDirectory()) { + IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(mapperService.indexAnalyzer())); + for (ParsedDocument d : docs) { + iw.addDocument(d.rootDoc()); + } + try (IndexReader reader = DirectoryReader.open(iw)) { + iw.close(); + IndexSearcher searcher = new IndexSearcher(reader); + TopDocs topDocs = 
searcher.search(matchBoth, 10); + assertEquals(topDocs.totalHits.value, 1); + assertEquals(topDocs.scoreDocs[0].doc, 0); + + topDocs = searcher.search(matchDelegate, 10); + assertEquals(topDocs.totalHits.value, 0); + + topDocs = searcher.search(matchFilter, 10); + assertEquals(topDocs.totalHits.value, 0); + + topDocs = searcher.search(matchNone, 10); + assertEquals(topDocs.totalHits.value, 0); + + topDocs = searcher.search(matchMultipleDocs, 10); + assertEquals(topDocs.totalHits.value, 2); + // assert constant score + for (ScoreDoc scoreDoc : topDocs.scoreDocs) { + assertEquals(scoreDoc.score, 1.0, 0.00000000001); + } + } + } + } + + public void testSourceDisabled() throws IOException { + MapperService mapperService = createMapperService(topMapping(b -> b.startObject("_source").field("enabled", false).endObject())); + QueryShardContext queryShardContext = createQueryShardContext(mapperService); + when(queryShardContext.sourcePath("dessert")).thenReturn(Set.of("dessert")); + when(queryShardContext.index()).thenReturn(new Index("test_index", "uuid")); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new SourceFieldMatchQuery( + QueryBuilders.matchQuery("dessert", "apple").doToQuery(queryShardContext), // Delegate query + QueryBuilders.matchQuery("dessert", "pie").doToQuery(queryShardContext), // Filter query + queryShardContext.getFieldType("dessert"), + queryShardContext + ) + ); + assertEquals( + "SourceFieldMatchQuery error: unable to fetch fields from _source field: " + + "_source is disabled in the mappings for index [test_index]", + e.getMessage() + ); + } + + public void testMissingField() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("dessert"); + { + b.field("type", "match_only_text"); + } + b.endObject(); + })); + QueryShardContext queryShardContext = createQueryShardContext(mapperService); + when(queryShardContext.sourcePath("dessert")).thenReturn(Set.of("dessert")); + when(queryShardContext.index()).thenReturn(new Index("test_index", "uuid")); + + String[] desserts = new String[] { "apple pie pie", "banana split pie", "chocolate cake" }; + List<ParsedDocument> docs = new ArrayList<>(); + for (String dessert : desserts) { + docs.add(mapperService.documentMapper().parse(source(b -> b.field("dessert", dessert)))); + } + SourceFieldMatchQuery matchDelegate = new SourceFieldMatchQuery( + QueryBuilders.matchQuery("dessert", "apple").doToQuery(queryShardContext), // Delegate query + QueryBuilders.matchQuery("username", "pie").doToQuery(queryShardContext), // Filter query missing field + queryShardContext.getFieldType("dessert"), + queryShardContext + ); + try (Directory dir = newDirectory()) { + IndexWriter iw = new IndexWriter(dir, new IndexWriterConfig(mapperService.indexAnalyzer())); + for (ParsedDocument d : docs) { + iw.addDocument(d.rootDoc()); + } + try (IndexReader reader = DirectoryReader.open(iw)) { + iw.close(); + IndexSearcher searcher = new IndexSearcher(reader); + TopDocs topDocs = searcher.search(matchDelegate, 10); + assertEquals(topDocs.totalHits.value, 0); + } + } + } +} diff --git a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java index 94c2e4ef7da62..ac78a0d1936ea 100644 --- a/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/mapper/MapperServiceTestCase.java @@ 
-239,7 +239,7 @@ protected final XContentBuilder fieldMapping(CheckedConsumer<XContentBuilder, IO }); } - QueryShardContext createQueryShardContext(MapperService mapperService) { + protected QueryShardContext createQueryShardContext(MapperService mapperService) { QueryShardContext queryShardContext = mock(QueryShardContext.class); when(queryShardContext.getMapperService()).thenReturn(mapperService); when(queryShardContext.fieldMapper(anyString())).thenAnswer(inv -> mapperService.fieldType(inv.getArguments()[0].toString())); @@ -254,6 +254,8 @@ QueryShardContext createQueryShardContext(MapperService mapperService) { when(queryShardContext.lookup()).thenReturn(new SearchLookup(mapperService, (ft, s) -> { throw new UnsupportedOperationException("search lookup not available"); })); + when(queryShardContext.getFieldType(any())).thenAnswer(inv -> mapperService.fieldType(inv.getArguments()[0].toString())); + when(queryShardContext.documentMapper(anyString())).thenReturn(mapperService.documentMapper()); return queryShardContext; } } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java index 82f15a590bea6..ac0447dbebf7e 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/AggregatorTestCase.java @@ -103,6 +103,7 @@ import org.opensearch.index.mapper.Mapper; import org.opensearch.index.mapper.Mapper.BuilderContext; import org.opensearch.index.mapper.MapperService; +import org.opensearch.index.mapper.MatchOnlyTextFieldMapper; import org.opensearch.index.mapper.NumberFieldMapper; import org.opensearch.index.mapper.ObjectMapper; import org.opensearch.index.mapper.ObjectMapper.Nested; @@ -760,7 +761,8 @@ public void testSupportedFieldTypes() throws IOException { source.put("type", mappedType.getKey()); // Text is the only field that doesn't support DVs, instead FD - if (mappedType.getKey().equals(TextFieldMapper.CONTENT_TYPE) == false) { + if (mappedType.getKey().equals(TextFieldMapper.CONTENT_TYPE) == false + && mappedType.getKey().equals(MatchOnlyTextFieldMapper.CONTENT_TYPE) == false) { source.put("doc_values", "true"); } From bb3959d75c729c9213374db4109c628d1774491c Mon Sep 17 00:00:00 2001 From: Ticheng Lin <51488860+ticheng-aws@users.noreply.github.com> Date: Wed, 3 Jan 2024 04:23:11 -0800 Subject: [PATCH 045/178] Fix flaky testTerminateAfterEarlyTermination (#11683) Signed-off-by: Ticheng Lin <ticheng@amazon.com> --- .../search/query/QueryProfilePhaseTests.java | 74 +++++++++++++------ 1 file changed, 52 insertions(+), 22 deletions(-) diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index ba1600e6eb651..48ac2d3b5a804 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -615,12 +615,22 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); if (executor != null) { - assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); - 
assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThan(0L)); - assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + long maxScore = query.getTimeBreakdown().get("max_score"); + long minScore = query.getTimeBreakdown().get("min_score"); + long avgScore = query.getTimeBreakdown().get("avg_score"); + long maxScoreCount = query.getTimeBreakdown().get("max_score_count"); + long minScoreCount = query.getTimeBreakdown().get("min_score_count"); + long avgScoreCount = query.getTimeBreakdown().get("avg_score_count"); + assertThat(maxScore, greaterThan(0L)); + assertThat(minScore, greaterThanOrEqualTo(0L)); + assertThat(avgScore, greaterThanOrEqualTo(0L)); + assertThat(maxScore, greaterThanOrEqualTo(avgScore)); + assertThat(avgScore, greaterThanOrEqualTo(minScore)); + assertThat(maxScoreCount, greaterThan(0L)); + assertThat(minScoreCount, greaterThanOrEqualTo(0L)); + assertThat(avgScoreCount, greaterThanOrEqualTo(0L)); + assertThat(maxScoreCount, greaterThanOrEqualTo(avgScoreCount)); + assertThat(avgScoreCount, greaterThanOrEqualTo(minScoreCount)); } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); @@ -744,30 +754,50 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), greaterThanOrEqualTo(0L)); + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(0L)); if (executor != null) { - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score_count"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score_count"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + long maxScore = query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score"); + long minScore = query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score"); + long avgScore = query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score"); + long maxScoreCount = query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score_count"); + long minScoreCount = query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score_count"); + long avgScoreCount = query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score_count"); + assertThat(maxScore, greaterThanOrEqualTo(0L)); + assertThat(minScore, greaterThanOrEqualTo(0L)); + 
assertThat(avgScore, greaterThanOrEqualTo(0L)); + assertThat(maxScore, greaterThanOrEqualTo(avgScore)); + assertThat(avgScore, greaterThanOrEqualTo(minScore)); + assertThat(maxScoreCount, greaterThanOrEqualTo(0L)); + assertThat(minScoreCount, greaterThanOrEqualTo(0L)); + assertThat(avgScoreCount, greaterThanOrEqualTo(0L)); + assertThat(maxScoreCount, greaterThanOrEqualTo(avgScoreCount)); + assertThat(avgScoreCount, greaterThanOrEqualTo(minScoreCount)); } assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery")); assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), greaterThanOrEqualTo(0L)); + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(0L)); if (executor != null) { - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score_count"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score_count"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + long maxScore = query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score"); + long minScore = query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score"); + long avgScore = query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score"); + long maxScoreCount = query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score_count"); + long minScoreCount = query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score_count"); + long avgScoreCount = query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score_count"); + assertThat(maxScore, greaterThanOrEqualTo(0L)); + assertThat(minScore, greaterThanOrEqualTo(0L)); + assertThat(avgScore, greaterThanOrEqualTo(0L)); + assertThat(maxScore, greaterThanOrEqualTo(avgScore)); + assertThat(avgScore, greaterThanOrEqualTo(minScore)); + assertThat(maxScoreCount, greaterThanOrEqualTo(0L)); + assertThat(minScoreCount, greaterThanOrEqualTo(0L)); + assertThat(avgScoreCount, greaterThanOrEqualTo(0L)); + assertThat(maxScoreCount, greaterThanOrEqualTo(avgScoreCount)); + assertThat(avgScoreCount, greaterThanOrEqualTo(minScoreCount)); } }, collector -> { assertThat(collector.getReason(), equalTo("search_terminate_after_count")); From 84404689d6259b317c3eaaf94895f369e9920434 Mon Sep 17 00:00:00 2001 From: Suraj Singh <surajrider@gmail.com> Date: Wed, 3 Jan 2024 11:14:52 -0800 Subject: [PATCH 046/178] Fix testIndexDeletionDuringSnapshotCreationInQueue flaky test (#11726) * Fix testIndexDeletionDuringSnapshotCreationInQueue flaky test Signed-off-by: Suraj Singh <surajrider@gmail.com> * Update comment Signed-off-by: Suraj Singh 
<surajrider@gmail.com> --------- Signed-off-by: Suraj Singh <surajrider@gmail.com> --- .../snapshots/DedicatedClusterSnapshotRestoreIT.java | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java index bc591de45dd86..7a52c8aa5018e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/DedicatedClusterSnapshotRestoreIT.java @@ -1457,6 +1457,13 @@ public void testIndexDeletionDuringSnapshotCreationInQueue() throws Exception { clusterAdmin().prepareRestoreSnapshot("test-repo", "test-snap").get(); ensureGreen("test-idx"); + + // Wait for snapshot process to complete to prevent conflict with repository clean up + assertBusy(() -> { + SnapshotInfo snapshotInfo = getSnapshot("test-repo", "test-snap-2"); + assertTrue(snapshotInfo.state().completed()); + assertEquals(SnapshotState.PARTIAL, snapshotInfo.state()); + }, 1, TimeUnit.MINUTES); } private long calculateTotalFilesSize(List<Path> files) { From 16d457d3f018893f6268a9ffa52d3d3f73f1a87a Mon Sep 17 00:00:00 2001 From: Peter Nied <petern@amazon.com> Date: Wed, 3 Jan 2024 15:28:45 -0600 Subject: [PATCH 047/178] Switch to more reliable OpenSearch Lucene snapshot location (#11728) * Switched to more reliable OpenSearch Lucene snapshot location - Related https://github.com/opensearch-project/opensearch-build/issues/3874 Signed-off-by: Peter Nied <petern@amazon.com> Signed-off-by: Peter Nied <peternied@hotmail.com> * Changelog entry Signed-off-by: Peter Nied <petern@amazon.com> Signed-off-by: Peter Nied <peternied@hotmail.com> --------- Signed-off-by: Peter Nied <petern@amazon.com> Signed-off-by: Peter Nied <peternied@hotmail.com> --- CHANGELOG.md | 2 +- .../java/org/opensearch/gradle/RepositoriesSetupPlugin.java | 2 +- gradle/code-coverage.gradle | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 026606ff57d65..f5191c5e04a41 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,7 +57,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Return 409 Conflict HTTP status instead of 503 on failure to concurrently execute snapshots ([#8986](https://github.com/opensearch-project/OpenSearch/pull/5855)) - Add task completion count in search backpressure stats API ([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/)) - Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894)) - +- Switched to more reliable OpenSearch Lucene snapshot location([#11728](https://github.com/opensearch-project/OpenSearch/pull/11728)) ### Deprecated diff --git a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java index 63b88f671c84c..8ecfbf40b6c62 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java @@ -94,7 +94,7 @@ public static void configureRepositories(Project project) { String revision = matcher.group(1); MavenArtifactRepository luceneRepo = repos.maven(repo -> { repo.setName("lucene-snapshots"); - 
repo.setUrl("https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/"); + repo.setUrl("https://artifacts.opensearch.org/snapshots/lucene/"); }); repos.exclusiveContent(exclusiveRepo -> { exclusiveRepo.filter( diff --git a/gradle/code-coverage.gradle b/gradle/code-coverage.gradle index dfb4ddba24113..822b471e2e034 100644 --- a/gradle/code-coverage.gradle +++ b/gradle/code-coverage.gradle @@ -13,7 +13,7 @@ repositories { gradlePluginPortal() // TODO: Find the way to use the repositories from RepositoriesSetupPlugin maven { - url = "https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/" + url = "https://artifacts.opensearch.org/snapshots/lucene/" } } From 36bd67465a12bbc56631f49a617429529a8365f9 Mon Sep 17 00:00:00 2001 From: rayshrey <121871912+rayshrey@users.noreply.github.com> Date: Thu, 4 Jan 2024 18:59:18 +0530 Subject: [PATCH 048/178] Add deleted doc count in _cat/shards (#11678) Signed-off-by: Shreyansh Ray <rayshrey@amazon.com> --- CHANGELOG.md | 1 + .../test/cat.shards/10_basic.yml | 94 ++++++++++++++++++- .../rest/action/cat/RestShardsAction.java | 2 + .../action/cat/RestShardsActionTests.java | 10 +- 4 files changed, 104 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f5191c5e04a41..aa2086646fbac 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -181,6 +181,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362)) - Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512)) - Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) +- Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) ### Deprecated diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml index f80c9f9c0bc80..b572ed9e62ea9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml @@ -1,13 +1,103 @@ "Help": - skip: - version: " - 2.3.99" + version: " - 2.11.99" + reason: deleted docs added in 2.12.0 + features: node_selector + - do: + cat.shards: + help: true + node_selector: + version: "2.12.0 - " + + - match: + $body: | + /^ index .+ \n + shard .+ \n + prirep .+ \n + state .+ \n + docs .+ \n + store .+ \n + ip .+ \n + id .+ \n + node .+ \n + sync_id .+ \n + unassigned.reason .+ \n + unassigned.at .+ \n + unassigned.for .+ \n + unassigned.details .+ \n + recoverysource.type .+ \n + completion.size .+ \n + fielddata.memory_size .+ \n + fielddata.evictions .+ \n + query_cache.memory_size .+ \n + query_cache.evictions .+ \n + flush.total .+ \n + flush.total_time .+ \n + get.current .+ \n + get.time .+ \n + get.total .+ \n + get.exists_time .+ \n + get.exists_total .+ \n + get.missing_time .+ \n + get.missing_total .+ \n + indexing.delete_current .+ \n + indexing.delete_time .+ \n + indexing.delete_total .+ \n + indexing.index_current .+ \n + indexing.index_time .+ \n + indexing.index_total .+ \n + indexing.index_failed .+ \n + merges.current .+ \n + merges.current_docs .+ \n + merges.current_size .+ \n + merges.total .+ \n + merges.total_docs .+ \n + merges.total_size .+ \n + merges.total_time .+ \n + refresh.total .+ \n + refresh.time .+ \n + 
refresh.external_total .+ \n + refresh.external_time .+ \n + refresh.listeners .+ \n + search.fetch_current .+ \n + search.fetch_time .+ \n + search.fetch_total .+ \n + search.open_contexts .+ \n + search.query_current .+ \n + search.query_time .+ \n + search.query_total .+ \n + search.scroll_current .+ \n + search.scroll_time .+ \n + search.scroll_total .+ \n + search.point_in_time_current .+ \n + search.point_in_time_time .+ \n + search.point_in_time_total .+ \n + segments.count .+ \n + segments.memory .+ \n + segments.index_writer_memory .+ \n + segments.version_map_memory .+ \n + segments.fixed_bitset_memory .+ \n + seq_no.max .+ \n + seq_no.local_checkpoint .+ \n + seq_no.global_checkpoint .+ \n + warmer.current .+ \n + warmer.total .+ \n + warmer.total_time .+ \n + path.data .+ \n + path.state .+ \n + docs.deleted .+ \n + $/ +--- +"Help from 2.4.0 to 2.11.0": + - skip: + version: " - 2.3.99 , 2.12.0 - " reason: point in time stats were added in 2.4.0 features: node_selector - do: cat.shards: help: true node_selector: - version: "2.4.0 - " + version: "2.4.0 - 2.11.99" - match: $body: | diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java index 5d5f55c7f4639..d0d00e4c4596a 100644 --- a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java +++ b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java @@ -283,6 +283,7 @@ protected Table getTableWithHeader(final RestRequest request) { table.addCell("path.data", "alias:pd,dataPath;default:false;text-align:right;desc:shard data path"); table.addCell("path.state", "alias:ps,statsPath;default:false;text-align:right;desc:shard state path"); + table.addCell("docs.deleted", "alias:dd,docsDeleted;default:false;text-align:right;desc:number of deleted docs in shard"); table.endHeaders(); return table; @@ -448,6 +449,7 @@ Table buildTable(RestRequest request, ClusterStateResponse state, IndicesStatsRe table.addCell(getOrNull(shardStats, ShardStats::getDataPath, s -> s)); table.addCell(getOrNull(shardStats, ShardStats::getStatePath, s -> s)); + table.addCell(getOrNull(commonStats, CommonStats::getDocs, DocsStats::getDeleted)); table.endRow(); } diff --git a/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java b/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java index a8679a087216d..73f83a5642bb4 100644 --- a/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java @@ -46,6 +46,7 @@ import org.opensearch.cluster.routing.ShardRoutingState; import org.opensearch.cluster.routing.TestShardRouting; import org.opensearch.common.Table; +import org.opensearch.index.shard.DocsStats; import org.opensearch.index.shard.ShardPath; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.rest.FakeRestRequest; @@ -65,6 +66,8 @@ public class RestShardsActionTests extends OpenSearchTestCase { public void testBuildTable() { final int numShards = randomIntBetween(1, 5); + long numDocs = randomLongBetween(0, 10000); + long numDeletedDocs = randomLongBetween(0, 100); DiscoveryNode localNode = new DiscoveryNode("local", buildNewFakeTransportAddress(), Version.CURRENT); List<ShardRouting> shardRoutings = new ArrayList<>(numShards); @@ -76,10 +79,12 @@ public void testBuildTable() { Path path = createTempDir().resolve("indices") 
                .resolve(shardRouting.shardId().getIndex().getUUID())
                .resolve(String.valueOf(shardRouting.shardId().id()));
+            CommonStats commonStats = new CommonStats();
+            commonStats.docs = new DocsStats(numDocs, numDeletedDocs, 0);
             ShardStats shardStats = new ShardStats(
                 shardRouting,
                 new ShardPath(false, path, path, shardRouting.shardId()),
-                null,
+                commonStats,
                 null,
                 null,
                 null
@@ -120,6 +125,7 @@ public void testBuildTable() {
         assertThat(headers.get(6).value, equalTo("ip"));
         assertThat(headers.get(7).value, equalTo("id"));
         assertThat(headers.get(8).value, equalTo("node"));
+        assertThat(headers.get(74).value, equalTo("docs.deleted"));

         final List<List<Table.Cell>> rows = table.getRows();
         assertThat(rows.size(), equalTo(numShards));
@@ -132,10 +138,12 @@ public void testBuildTable() {
             assertThat(row.get(1).value, equalTo(shardRouting.getId()));
             assertThat(row.get(2).value, equalTo(shardRouting.primary() ? "p" : "r"));
             assertThat(row.get(3).value, equalTo(shardRouting.state()));
+            assertThat(row.get(4).value, equalTo(shardStats.getStats().getDocs().getCount()));
             assertThat(row.get(6).value, equalTo(localNode.getHostAddress()));
             assertThat(row.get(7).value, equalTo(localNode.getId()));
             assertThat(row.get(72).value, equalTo(shardStats.getDataPath()));
             assertThat(row.get(73).value, equalTo(shardStats.getStatePath()));
+            assertThat(row.get(74).value, equalTo(shardStats.getStats().getDocs().getDeleted()));
         }
     }
 }

From f6475152aece1083add15ac6cac31758eb711d9e Mon Sep 17 00:00:00 2001
From: gaobinlong <gbinlong@amazon.com>
Date: Thu, 4 Jan 2024 21:48:32 +0800
Subject: [PATCH 049/178] Fix simulate remove ingest processor throwing illegal_argument_exception (#11607)

* Fix simulate remove ingest processor throwing illegal_argument_exception

Signed-off-by: Gao Binlong <gbinlong@amazon.com>

* modify change log

Signed-off-by: Gao Binlong <gbinlong@amazon.com>

* Create a new test method

Signed-off-by: Gao Binlong <gbinlong@amazon.com>

* Use old method to get field value

Signed-off-by: Gao Binlong <gbinlong@amazon.com>

---------

Signed-off-by: Gao Binlong <gbinlong@amazon.com>
---
 CHANGELOG.md                                  |  2 +-
 .../ingest/common/RemoveProcessor.java        | 20 ++--
 .../ingest/common/RemoveProcessorTests.java   | 97 +++++++++++++++++++
 .../test/ingest/290_remove_processor.yml      | 92 ++++++++++++++++++
 4 files changed, 201 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index aa2086646fbac..9b5f2dc5a16f2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -170,7 +170,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395))
 - Performance improvement for Datetime field caching ([#4558](https://github.com/opensearch-project/OpenSearch/issues/4558))
 - Add instrumentation for indexing in transport bulk action and transport shard bulk action. 
([#10273](https://github.com/opensearch-project/OpenSearch/pull/10273)) -- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895)) +- Disallow removing some metadata fields by remove ingest processor ([#10895](https://github.com/opensearch-project/OpenSearch/pull/10895), [#11607](https://github.com/opensearch-project/OpenSearch/pull/11607)) - Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057)) - Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023)) - Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083)) diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java index bb3d4bca47859..a48cfd87b78c3 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java @@ -88,16 +88,18 @@ public IngestDocument execute(IngestDocument document) { throw new IllegalArgumentException("cannot remove metadata field [" + path + "]"); } // removing _id is disallowed when there's an external version specified in the request - String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class); if (path.equals(IngestDocument.Metadata.ID.getFieldName()) - && !Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) { - Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class); - throw new IllegalArgumentException( - "cannot remove metadata field [_id] when specifying external version for the document, version: " - + version - + ", version_type: " - + versionType - ); + && document.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { + String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class); + if (!Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) { + Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class, true); + throw new IllegalArgumentException( + "cannot remove metadata field [_id] when specifying external version for the document, version: " + + version + + ", version_type: " + + versionType + ); + } } document.removeField(path); }); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java index 1a5630a4730f2..c138ad606d2e5 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java @@ -32,6 +32,7 @@ package org.opensearch.ingest.common; +import org.opensearch.common.lucene.uid.Versions; import org.opensearch.index.VersionType; import org.opensearch.ingest.IngestDocument; import org.opensearch.ingest.Processor; @@ -181,4 +182,100 @@ public void testRemoveMetadataField() throws Exception { } } } + + public void testRemoveDocumentId() throws Exception { + Map<String, Object> config = new HashMap<>(); 
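+        // the cases below cover all four version types: external and external_gte reject removing _id, internal and unset allow it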
+ config.put("field", IngestDocument.Metadata.ID.getFieldName()); + String processorTag = randomAlphaOfLength(10); + + // test remove _id when _version_type is external + IngestDocument ingestDocumentWithExternalVersionType = new IngestDocument( + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + 1L, + VersionType.EXTERNAL, + RandomDocumentPicks.randomSource(random()) + ); + + Processor processorForExternalVersionType = new RemoveProcessor.Factory(TestTemplateService.instance()).create( + null, + processorTag, + null, + config + ); + assertThrows( + "cannot remove metadata field [_id] when specifying external version for the document, version: " + + 1 + + ", version_type: " + + VersionType.EXTERNAL, + IllegalArgumentException.class, + () -> processorForExternalVersionType.execute(ingestDocumentWithExternalVersionType) + ); + + // test remove _id when _version_type is external_gte + config.put("field", IngestDocument.Metadata.ID.getFieldName()); + IngestDocument ingestDocumentWithExternalGTEVersionType = new IngestDocument( + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + 1L, + VersionType.EXTERNAL_GTE, + RandomDocumentPicks.randomSource(random()) + ); + + Processor processorForExternalGTEVersionType = new RemoveProcessor.Factory(TestTemplateService.instance()).create( + null, + processorTag, + null, + config + ); + assertThrows( + "cannot remove metadata field [_id] when specifying external version for the document, version: " + + 1 + + ", version_type: " + + VersionType.EXTERNAL_GTE, + IllegalArgumentException.class, + () -> processorForExternalGTEVersionType.execute(ingestDocumentWithExternalGTEVersionType) + ); + + // test remove _id when _version_type is internal + config.put("field", IngestDocument.Metadata.ID.getFieldName()); + IngestDocument ingestDocumentWithInternalVersionType = new IngestDocument( + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + Versions.MATCH_ANY, + VersionType.INTERNAL, + RandomDocumentPicks.randomSource(random()) + ); + + Processor processorForInternalVersionType = new RemoveProcessor.Factory(TestTemplateService.instance()).create( + null, + processorTag, + null, + config + ); + processorForInternalVersionType.execute(ingestDocumentWithInternalVersionType); + assertThat(ingestDocumentWithInternalVersionType.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(false)); + + // test remove _id when _version_type is null + config.put("field", IngestDocument.Metadata.ID.getFieldName()); + IngestDocument ingestDocumentWithNoVersionType = new IngestDocument( + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + RandomDocumentPicks.randomString(random()), + null, + null, + RandomDocumentPicks.randomSource(random()) + ); + Processor processorForNullVersionType = new RemoveProcessor.Factory(TestTemplateService.instance()).create( + null, + processorTag, + null, + config + ); + processorForNullVersionType.execute(ingestDocumentWithNoVersionType); + assertThat(ingestDocumentWithNoVersionType.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(false)); + } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml 
b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
index 4811769d04f0e..6668b468f8edc 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml
@@ -5,6 +5,69 @@ teardown:
         id: "my_pipeline"
         ignore: 404

+---
+"Test simulate API works well with remove processor":
+  - do:
+      ingest.put_pipeline:
+        id: "my_pipeline"
+        body: >
+          {
+            "description": "_description",
+            "processors": [
+              {
+                "remove" : {
+                  "field" : "{{foo}}"
+                }
+              }
+            ]
+          }
+  - match: { acknowledged: true }
+
+  # test simulating existing pipeline works well
+  - do:
+      ingest.simulate:
+        id: "my_pipeline"
+        body: >
+          {
+            "docs": [
+              {
+                "_source": {
+                  "foo": "bar",
+                  "bar": "zoo"
+                }
+              }
+            ]
+          }
+  - length: { docs: 1 }
+  - match: { docs.0.doc._source: { "foo": "bar" } }
+
+  # test simulating inflight pipeline works well
+  - do:
+      ingest.simulate:
+        body: >
+          {
+            "pipeline": {
+              "description": "_description",
+              "processors": [
+                {
+                  "remove" : {
+                    "field" : "{{foo}}"
+                  }
+                }
+              ]
+            },
+            "docs": [
+              {
+                "_source": {
+                  "foo": "bar",
+                  "bar": "zoo"
+                }
+              }
+            ]
+          }
+  - length: { docs: 1 }
+  - match: { docs.0.doc._source: { "foo": "bar" } }
+
 ---
 "Test remove processor with non-existing field and without ignore_missing":
   - do:
@@ -227,3 +290,32 @@ teardown:
         version: 1
         version_type: "external"
         body: { message: "foo bar baz" }
+
+  # test simulating pipeline with removing _id
+  - do:
+      ingest.simulate:
+        body: >
+          {
+            "pipeline": {
+              "description": "_description",
+              "processors": [
+                {
+                  "remove" : {
+                    "field" : "_id"
+                  }
+                }
+              ]
+            },
+            "docs": [
+              {
+                "_version_type": "external_gte",
+                "_version": 1,
+                "_source": {
+                  "foo": "bar",
+                  "bar": "zoo"
+                }
+              }
+            ]
+          }
+  - match: { docs.0.error.type: "illegal_argument_exception" }
+  - match: { docs.0.error.reason: "cannot remove metadata field [_id] when specifying external version for the document, version: 1, version_type: external_gte" }

From 178a7a073e1236d552f697e7f67d32f04a7fe8ec Mon Sep 17 00:00:00 2001
From: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>
Date: Thu, 4 Jan 2024 10:12:35 -0800
Subject: [PATCH 050/178] Fixes Numeric exact match queries to use range queries internally (#11209)

* Updating numeric term and terms queries to use IODVQ

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Addressing comments

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fix formatting

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fix changelog

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Addressing more comments + adding tests

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* renaming yaml test

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Adding skip for bwc

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Adding new SortedUnsignedLongDocValuesSetQuery to allow for BigInteger Terms query

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fixing some tests

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Remove duplicate skip

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Remove unused points declaration

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Change unsigned exact query to be consistent

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Use slowExactQuery from Unsigned Set Query 
Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com> * Merging different yaml tests into a single test Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com> * Updating test case for main Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com> * Fix changelog Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com> --------- Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com> --- CHANGELOG.md | 1 + .../index/mapper/ScaledFloatFieldMapper.java | 17 +- .../mapper/ScaledFloatFieldTypeTests.java | 19 +- .../test/search/340_doc_values_field.yml | 1147 +++++++++++++++++ .../test/search/340_keyword_doc_values.yml | 46 - .../org/apache/lucene/util/LongHashSet.java | 136 ++ .../SortedUnsignedLongDocValuesSetQuery.java | 176 +++ .../index/mapper/NumberFieldMapper.java | 290 ++++- .../index/mapper/NumberFieldTypeTests.java | 90 +- .../index/query/TermQueryBuilderTests.java | 2 + .../index/query/TermsQueryBuilderTests.java | 2 + 11 files changed, 1795 insertions(+), 131 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/340_keyword_doc_values.yml create mode 100644 server/src/main/java/org/apache/lucene/util/LongHashSet.java create mode 100644 server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 9b5f2dc5a16f2..ac15176af56f5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -176,6 +176,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083)) - Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087)) - Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528)) +- Improved performance of numeric exact-match queries ([#11209](https://github.com/opensearch-project/OpenSearch/pull/11209)) - Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312)) - Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308)) - Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362)) diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java index 7be241017f683..400d867296e5f 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java @@ -199,9 +199,9 @@ public String typeName() { @Override public Query termQuery(Object value, QueryShardContext context) { - failIfNotIndexed(); + failIfNotIndexedAndNoDocValues(); long scaledValue = Math.round(scale(value)); - Query query = NumberFieldMapper.NumberType.LONG.termQuery(name(), scaledValue); + Query query = NumberFieldMapper.NumberType.LONG.termQuery(name(), scaledValue, hasDocValues(), isSearchable()); if (boost() != 1f) { query = new BoostQuery(query, boost()); } @@ -210,13 +210,18 @@ public Query termQuery(Object value, 
QueryShardContext context) { @Override public Query termsQuery(List<?> values, QueryShardContext context) { - failIfNotIndexed(); + failIfNotIndexedAndNoDocValues(); List<Long> scaledValues = new ArrayList<>(values.size()); for (Object value : values) { long scaledValue = Math.round(scale(value)); scaledValues.add(scaledValue); } - Query query = NumberFieldMapper.NumberType.LONG.termsQuery(name(), Collections.unmodifiableList(scaledValues)); + Query query = NumberFieldMapper.NumberType.LONG.termsQuery( + name(), + Collections.unmodifiableList(scaledValues), + hasDocValues(), + isSearchable() + ); if (boost() != 1f) { query = new BoostQuery(query, boost()); } @@ -225,7 +230,7 @@ public Query termsQuery(List<?> values, QueryShardContext context) { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - failIfNotIndexed(); + failIfNotIndexedAndNoDocValues(); Long lo = null; if (lowerTerm != null) { double dValue = scale(lowerTerm); @@ -242,7 +247,7 @@ public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower } hi = Math.round(Math.floor(dValue)); } - Query query = NumberFieldMapper.NumberType.LONG.rangeQuery(name(), lo, hi, true, true, hasDocValues(), context); + Query query = NumberFieldMapper.NumberType.LONG.rangeQuery(name(), lo, hi, true, true, hasDocValues(), isSearchable(), context); if (boost() != 1f) { query = new BoostQuery(query, boost()); } diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java index be12c49321b87..d83811e6668eb 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -34,11 +34,13 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; @@ -63,7 +65,9 @@ public void testTermQuery() { ); double value = (randomDouble() * 2 - 1) * 10000; long scaledValue = Math.round(value * ft.getScalingFactor()); - assertEquals(LongPoint.newExactQuery("scaled_float", scaledValue), ft.termQuery(value, null)); + Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery("scaled_float", scaledValue); + Query query = new IndexOrDocValuesQuery(LongPoint.newExactQuery("scaled_float", scaledValue), dvQuery); + assertEquals(query, ft.termQuery(value, null)); } public void testTermsQuery() { @@ -75,7 +79,7 @@ public void testTermsQuery() { long scaledValue1 = Math.round(value1 * ft.getScalingFactor()); double value2 = (randomDouble() * 2 - 1) * 10000; long scaledValue2 = Math.round(value2 * ft.getScalingFactor()); - assertEquals(LongPoint.newSetQuery("scaled_float", scaledValue1, scaledValue2), ft.termsQuery(Arrays.asList(value1, value2), null)); + assertEquals(LongField.newSetQuery("scaled_float", scaledValue1, scaledValue2), ft.termsQuery(Arrays.asList(value1, value2), 
null)); } public void testRangeQuery() throws IOException { @@ -112,7 +116,16 @@ public void testRangeQuery() throws IOException { Double u = randomBoolean() ? null : (randomDouble() * 2 - 1) * 10000; boolean includeLower = randomBoolean(); boolean includeUpper = randomBoolean(); - Query doubleQ = NumberFieldMapper.NumberType.DOUBLE.rangeQuery("double", l, u, includeLower, includeUpper, false, MOCK_QSC); + Query doubleQ = NumberFieldMapper.NumberType.DOUBLE.rangeQuery( + "double", + l, + u, + includeLower, + includeUpper, + false, + true, + MOCK_QSC + ); Query scaledFloatQ = ft.rangeQuery(l, u, includeLower, includeUpper, MOCK_QSC); assertEquals(searcher.count(doubleQ), searcher.count(scaledFloatQ)); } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml new file mode 100644 index 0000000000000..f3281e35ac8e6 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml @@ -0,0 +1,1147 @@ +setup: + - skip: + features: [ "headers" ] + version: " - 2.99.99" + reason: "searching with only doc_values was added in 3.0.0" +--- +"search on fields with both index and doc_values enabled": + - do: + indices.create: + index: test-iodvq + body: + mappings: + properties: + some_keyword: + type: keyword + index: true + doc_values: true + byte: + type: byte + index: true + doc_values: true + double: + type: double + index: true + doc_values: true + float: + type: float + index: true + doc_values: true + half_float: + type: half_float + index: true + doc_values: true + integer: + type: integer + index: true + doc_values: true + long: + type: long + index: true + doc_values: true + short: + type: short + index: true + doc_values: true + unsigned_long: + type: unsigned_long + index: true + doc_values: true + + - do: + bulk: + index: test-iodvq + refresh: true + body: + - '{"index": {"_index": "test-iodvq", "_id": "1" }}' + - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }' + - '{ "index": { "_index": "test-iodvq", "_id": "2" }}' + - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }' + - '{ "index": { "_index": "test-iodvq", "_id": "3" } }' + - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }' + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + prefix: + some_keyword: "ing" + + - match: { hits.hits.0._source.some_keyword: "ingesting some random keyword data" } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + "some_keyword": { + "lt": 500 + } } + + - match: { hits.total: 2 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + half_float: 400.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + float: 800.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + double: 100.0 + + - match: { hits.total: 1 } + + - do: + search: + 
rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + byte: 120 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + short: 150 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + integer: 1291 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + long: 13456 + + - match: { hits.total: 1 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + term: + unsigned_long: 10223372036854775800 + + - match: { hits.total: 1 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + half_float: [ 400.0, 401.0 ] + + - match: { hits.total: 2 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + float: [ 800.0, 801.0 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + byte: [ 120, 121 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + double: [ 100.0, 101.0 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + short: [ 150, 151 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + integer: [ 1290, 1291 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + long: [ 13456, 13457 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + terms: + unsigned_long: [ 10223372036854775800, 10223372036854775801 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + half_float: { + gte: 401.0, + lte: 402.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + float: { + gte: 801.0, + lte: 802.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + byte: { + gte: 120, + lte: 121 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + double: { + gte: 101.0, + lte: 102.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + short: { + gte: 151, + lte: 152 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + integer: { + gte: 1291, + lte: 1292 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + long: { + gte: 13457, + lte: 13458 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-iodvq + body: + query: + range: { + unsigned_long: { + gte: 10223372036854775801, + lte: 10223372036854775802 + }, + } + + - match: { hits.total: 2 } + +--- +"search on fields with only index enabled": + - do: + indices.create: + index: test-index + body: + mappings: + 
properties: + some_keyword: + type: keyword + index: true + doc_values: false + byte: + type: byte + index: true + doc_values: false + double: + type: double + index: true + doc_values: false + float: + type: float + index: true + doc_values: false + half_float: + type: half_float + index: true + doc_values: false + integer: + type: integer + index: true + doc_values: false + long: + type: long + index: true + doc_values: false + short: + type: short + index: true + doc_values: false + unsigned_long: + type: unsigned_long + index: true + doc_values: false + + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_index": "test-index", "_id": "1" }}' + - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }' + - '{ "index": { "_index": "test-index", "_id": "2" }}' + - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }' + - '{ "index": { "_index": "test-index", "_id": "3" } }' + - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }' + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + prefix: + some_keyword: "ing" + + - match: { hits.hits.0._source.some_keyword: "ingesting some random keyword data" } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + "some_keyword": { + "lt": 500 + } } + + - match: { hits.total: 2 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + half_float: 400.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + float: 800.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + double: 100.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + byte: 120 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + short: 150 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + integer: 1291 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + long: 13456 + + - match: { hits.total: 1 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + term: + unsigned_long: 10223372036854775800 + + - match: { hits.total: 1 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + half_float: [ 400.0, 401.0 ] + + - match: { hits.total: 2 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + float: [ 800.0, 801.0 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + byte: [ 120, 121 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + double: [ 100.0, 101.0 ] + + - match: { hits.total: 2 } + + - 
do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + short: [ 150, 151 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + integer: [ 1290, 1291 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + long: [ 13456, 13457 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + terms: + unsigned_long: [ 10223372036854775800, 10223372036854775801 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + half_float: { + gte: 401.0, + lte: 402.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + float: { + gte: 801.0, + lte: 802.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + byte: { + gte: 120, + lte: 121 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + double: { + gte: 101.0, + lte: 102.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + short: { + gte: 151, + lte: 152 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + integer: { + gte: 1291, + lte: 1292 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + long: { + gte: 13457, + lte: 13458 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-index + body: + query: + range: { + unsigned_long: { + gte: 10223372036854775801, + lte: 10223372036854775802 + }, + } + + - match: { hits.total: 2 } + +--- +"search on fields with only doc_values enabled": + - do: + indices.create: + index: test-doc-values + body: + mappings: + properties: + some_keyword: + type: keyword + index: false + doc_values: true + byte: + type: byte + index: false + doc_values: true + double: + type: double + index: false + doc_values: true + float: + type: float + index: false + doc_values: true + half_float: + type: half_float + index: false + doc_values: true + integer: + type: integer + index: false + doc_values: true + long: + type: long + index: false + doc_values: true + short: + type: short + index: false + doc_values: true + unsigned_long: + type: unsigned_long + index: false + doc_values: true + + - do: + bulk: + index: test-doc-values + refresh: true + body: + - '{"index": {"_index": "test-doc-values", "_id": "1" }}' + - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }' + - '{ "index": { "_index": "test-doc-values", "_id": "2" }}' + - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }' + - '{ "index": { "_index": "test-doc-values", "_id": "3" } }' + - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 
1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }' + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + prefix: + some_keyword: "ing" + + - match: { hits.hits.0._source.some_keyword: "ingesting some random keyword data" } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + "some_keyword": { + "lt": 500 + } } + + - match: { hits.total: 2 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + half_float: 400.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + float: 800.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + double: 100.0 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + byte: 120 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + short: 150 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + integer: 1291 + + - match: { hits.total: 1 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + long: 13456 + + - match: { hits.total: 1 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + term: + unsigned_long: 10223372036854775800 + + - match: { hits.total: 1 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + half_float: [ 400.0, 401.0 ] + + - match: { hits.total: 2 } + + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + float: [ 800.0, 801.0 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + byte: [ 120, 121 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + double: [ 100.0, 101.0 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + short: [ 150, 151 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + integer: [ 1290, 1291 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + long: [ 13456, 13457 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + terms: + unsigned_long: [ 10223372036854775800, 10223372036854775801 ] + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + half_float: { + gte: 401.0, + lte: 402.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + float: { + gte: 801.0, + lte: 802.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + byte: { + gte: 120, + lte: 121 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + 
rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + double: { + gte: 101.0, + lte: 102.0 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + short: { + gte: 151, + lte: 152 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + integer: { + gte: 1291, + lte: 1292 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + long: { + gte: 13457, + lte: 13458 + }, + } + + - match: { hits.total: 2 } + + - do: + search: + rest_total_hits_as_int: true + index: test-doc-values + body: + query: + range: { + unsigned_long: { + gte: 10223372036854775801, + lte: 10223372036854775802 + }, + } + + - match: { hits.total: 2 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_keyword_doc_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_keyword_doc_values.yml deleted file mode 100644 index 8829e7b100fdd..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_keyword_doc_values.yml +++ /dev/null @@ -1,46 +0,0 @@ ---- -"search on keyword fields with doc_values enabled": - - do: - indices.create: - index: test - body: - mappings: - properties: - "some_keyword": - type: "keyword" - index: true - doc_values: true - - - do: - bulk: - index: test - refresh: true - body: - - '{"index": {"_index": "test", "_id": "1" }}' - - '{ "some_keyword": "ingesting some random keyword data" }' - - '{ "index": { "_index": "test", "_id": "2" }}' - - '{ "some_keyword": "400" }' - - '{ "index": { "_index": "test", "_id": "3" } }' - - '{ "some_keyword": "5" }' - - - do: - search: - index: test - body: - query: - prefix: - some_keyword: "ing" - - - match: { hits.hits.0._source.some_keyword: "ingesting some random keyword data" } - - - do: - search: - index: test - body: - query: - range: { - "some_keyword": { - "lt": 500 - } } - - - match: { hits.total.value: 2 } diff --git a/server/src/main/java/org/apache/lucene/util/LongHashSet.java b/server/src/main/java/org/apache/lucene/util/LongHashSet.java new file mode 100644 index 0000000000000..a463e8a189585 --- /dev/null +++ b/server/src/main/java/org/apache/lucene/util/LongHashSet.java @@ -0,0 +1,136 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.apache.lucene.util; + +import org.apache.lucene.util.packed.PackedInts; + +import java.util.Arrays; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.stream.LongStream; + +/** Set of longs, optimized for docvalues usage */ +public final class LongHashSet implements Accountable { + private static final long BASE_RAM_BYTES = RamUsageEstimator.shallowSizeOfInstance(LongHashSet.class); + + private static final long MISSING = Long.MIN_VALUE; + + final long[] table; + final int mask; + final boolean hasMissingValue; + final int size; + /** minimum value in the set, or Long.MAX_VALUE for an empty set */ + public final long minValue; + /** maximum value in the set, or Long.MIN_VALUE for an empty set */ + public final long maxValue; + + /** Construct a set. Values must be in sorted order. 
*/ + public LongHashSet(long[] values) { + int tableSize = Math.toIntExact(values.length * 3L / 2); + tableSize = 1 << PackedInts.bitsRequired(tableSize); // make it a power of 2 + assert tableSize >= values.length * 3L / 2; + table = new long[tableSize]; + Arrays.fill(table, MISSING); + mask = tableSize - 1; + boolean hasMissingValue = false; + int size = 0; + long previousValue = Long.MIN_VALUE; // for assert + for (long value : values) { + if (value == MISSING) { + size += hasMissingValue ? 0 : 1; + hasMissingValue = true; + } else if (add(value)) { + ++size; + } + assert value >= previousValue : "values must be provided in sorted order"; + previousValue = value; + } + this.hasMissingValue = hasMissingValue; + this.size = size; + this.minValue = values.length == 0 ? Long.MAX_VALUE : values[0]; + this.maxValue = values.length == 0 ? Long.MIN_VALUE : values[values.length - 1]; + } + + private boolean add(long l) { + assert l != MISSING; + final int slot = Long.hashCode(l) & mask; + for (int i = slot;; i = (i + 1) & mask) { + if (table[i] == MISSING) { + table[i] = l; + return true; + } else if (table[i] == l) { + // already added + return false; + } + } + } + + /** + * check for membership in the set. + * + * <p>You should use {@link #minValue} and {@link #maxValue} to guide/terminate iteration before + * calling this. + */ + public boolean contains(long l) { + if (l == MISSING) { + return hasMissingValue; + } + final int slot = Long.hashCode(l) & mask; + for (int i = slot;; i = (i + 1) & mask) { + if (table[i] == MISSING) { + return false; + } else if (table[i] == l) { + return true; + } + } + } + + /** returns a stream of all values contained in this set */ + LongStream stream() { + LongStream stream = Arrays.stream(table).filter(v -> v != MISSING); + if (hasMissingValue) { + stream = LongStream.concat(LongStream.of(MISSING), stream); + } + return stream; + } + + @Override + public int hashCode() { + return Objects.hash(size, minValue, maxValue, mask, hasMissingValue, Arrays.hashCode(table)); + } + + @Override + public boolean equals(Object obj) { + if (obj != null && obj instanceof LongHashSet) { + LongHashSet that = (LongHashSet) obj; + return size == that.size + && minValue == that.minValue + && maxValue == that.maxValue + && mask == that.mask + && hasMissingValue == that.hasMissingValue + && Arrays.equals(table, that.table); + } + return false; + } + + @Override + public String toString() { + return stream().mapToObj(String::valueOf).collect(Collectors.joining(", ", "[", "]")); + } + + /** number of elements in the set */ + public int size() { + return size; + } + + @Override + public long ramBytesUsed() { + return BASE_RAM_BYTES + RamUsageEstimator.sizeOfObject(table); + } +} diff --git a/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java new file mode 100644 index 0000000000000..669dbb1e1bfc7 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/document/SortedUnsignedLongDocValuesSetQuery.java @@ -0,0 +1,176 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.index.document;
+
+import org.apache.lucene.index.DocValues;
+import org.apache.lucene.index.FieldInfo;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NumericDocValues;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.search.ConstantScoreScorer;
+import org.apache.lucene.search.ConstantScoreWeight;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryVisitor;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.TwoPhaseIterator;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.LongHashSet;
+
+import java.io.IOException;
+import java.math.BigInteger;
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * The {@link org.apache.lucene.document.SortedNumericDocValuesSetQuery} implementation for unsigned long numeric data type.
+ *
+ * @opensearch.internal
+ */
+public abstract class SortedUnsignedLongDocValuesSetQuery extends Query {
+
+    private final String field;
+    private final LongHashSet numbers;
+
+    SortedUnsignedLongDocValuesSetQuery(String field, BigInteger[] numbers) {
+        this.field = Objects.requireNonNull(field);
+        Arrays.sort(numbers);
+        this.numbers = new LongHashSet(Arrays.stream(numbers).mapToLong(n -> n.longValue()).toArray());
+    }
+
+    @Override
+    public String toString(String field) {
+        return new StringBuilder().append(field).append(": ").append(numbers.toString()).toString();
+    }
+
+    @Override
+    public void visit(QueryVisitor visitor) {
+        if (visitor.acceptField(field)) {
+            visitor.visitLeaf(this);
+        }
+    }
+
+    @Override
+    public Query rewrite(IndexSearcher indexSearcher) throws IOException {
+        if (numbers.size() == 0) {
+            return new MatchNoDocsQuery();
+        }
+        return super.rewrite(indexSearcher);
+    }
+
+    @Override
+    public boolean equals(Object other) {
+        if (sameClassAs(other) == false) {
+            return false;
+        }
+        SortedUnsignedLongDocValuesSetQuery that = (SortedUnsignedLongDocValuesSetQuery) other;
+        return field.equals(that.field) && numbers.equals(that.numbers);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(classHash(), field, numbers);
+    }
+
+    abstract SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException;
+
+    @Override
+    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
+        return new ConstantScoreWeight(this, boost) {
+
+            @Override
+            public boolean isCacheable(LeafReaderContext ctx) {
+                return DocValues.isCacheable(ctx, field);
+            }
+
+            @Override
+            public Scorer scorer(LeafReaderContext context) throws IOException {
+                SortedNumericDocValues values = getValues(context.reader(), field);
+                if (values == null) {
+                    return null;
+                }
+                final NumericDocValues singleton = DocValues.unwrapSingleton(values);
+                final TwoPhaseIterator iterator;
+                if (singleton != null) {
+                    iterator = new TwoPhaseIterator(singleton) {
+                        @Override
+                        public boolean matches() throws IOException {
+                            long value = singleton.longValue();
+                            return Long.compareUnsigned(value, numbers.minValue) >= 0
+                                && Long.compareUnsigned(value, numbers.maxValue) <= 0
+                                && numbers.contains(value);
+                        }
+
+                        @Override
+                        public float matchCost() {
+                            return 5; // 2 comparisons, possible lookup in the set
+                        }
+                    };
+                } else {
+                    iterator = new TwoPhaseIterator(values) {
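+                        // Multi-valued case: this document's values come back in sorted order, so skip
+                        // values below the set's minimum and stop as soon as one exceeds its maximum
+                        // before probing the hash set.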
@Override + public boolean matches() throws IOException { + int count = values.docValueCount(); + for (int i = 0; i < count; i++) { + final long value = values.nextValue(); + if (Long.compareUnsigned(value, numbers.minValue) < 0) { + continue; + } else if (Long.compareUnsigned(value, numbers.maxValue) > 0) { + return false; // values are sorted, terminate + } else if (numbers.contains(value)) { + return true; + } + } + return false; + } + + @Override + public float matchCost() { + return 5; // 2 comparisons, possible lookup in the set + } + }; + } + return new ConstantScoreScorer(this, score(), scoreMode, iterator); + } + }; + } + + public static Query newSlowSetQuery(String field, BigInteger... values) { + return new SortedUnsignedLongDocValuesSetQuery(field, values) { + @Override + SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException { + FieldInfo info = reader.getFieldInfos().fieldInfo(field); + if (info == null) { + // Queries have some optimizations when one sub scorer returns null rather + // than a scorer that does not match any documents + return null; + } + return DocValues.getSortedNumeric(reader, field); + } + }; + } + + public static Query newSlowExactQuery(String field, BigInteger value) { + return new SortedUnsignedLongDocValuesRangeQuery(field, value, value) { + @Override + SortedNumericDocValues getValues(LeafReader reader, String field) throws IOException { + FieldInfo info = reader.getFieldInfos().fieldInfo(field); + if (info == null) { + // Queries have some optimizations when one sub scorer returns null rather + // than a scorer that does not match any documents + return null; + } + return DocValues.getSortedNumeric(reader, field); + } + }; + } +} diff --git a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java index 204e7bc4c16ab..524d2b0e0dd38 100644 --- a/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/NumberFieldMapper.java @@ -37,6 +37,7 @@ import org.apache.lucene.document.DoublePoint; import org.apache.lucene.document.Field; +import org.apache.lucene.document.FloatField; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.LongPoint; @@ -61,6 +62,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.core.xcontent.XContentParser.Token; import org.opensearch.index.document.SortedUnsignedLongDocValuesRangeQuery; +import org.opensearch.index.document.SortedUnsignedLongDocValuesSetQuery; import org.opensearch.index.fielddata.IndexFieldData; import org.opensearch.index.fielddata.IndexNumericFieldData.NumericType; import org.opensearch.index.fielddata.plain.SortedNumericIndexFieldData; @@ -201,18 +203,39 @@ public Float parse(XContentParser parser, boolean coerce) throws IOException { } @Override - public Query termQuery(String field, Object value) { + public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) { float v = parse(value, false); + if (isSearchable && hasDocValues) { + Query query = HalfFloatPoint.newExactQuery(field, v); + Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery(field, HalfFloatPoint.halfFloatToSortableShort(v)); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowExactQuery(field, HalfFloatPoint.halfFloatToSortableShort(v)); + } 
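+            // No doc values here, and the caller's failIfNotIndexedAndNoDocValues() has already
+            // rejected fields that are neither indexed nor doc-valued, so the field must be
+            // indexed: fall back to the points-only query.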
return HalfFloatPoint.newExactQuery(field, v); } @Override - public Query termsQuery(String field, List<Object> values) { + public Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable) { float[] v = new float[values.size()]; + long points[] = new long[v.length]; for (int i = 0; i < values.size(); ++i) { v[i] = parse(values.get(i), false); + if (hasDocValues) { + points[i] = HalfFloatPoint.halfFloatToSortableShort(v[i]); + } + } + if (isSearchable && hasDocValues) { + Query query = HalfFloatPoint.newSetQuery(field, v); + Query dvQuery = SortedNumericDocValuesField.newSlowSetQuery(field, points); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowSetQuery(field, points); } return HalfFloatPoint.newSetQuery(field, v); + } @Override @@ -223,6 +246,7 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { float l = Float.NEGATIVE_INFINITY; @@ -241,16 +265,23 @@ public Query rangeQuery( } u = HalfFloatPoint.nextDown(u); } - Query query = HalfFloatPoint.newRangeQuery(field, l, u); - if (hasDocValues) { + if (isSearchable && hasDocValues) { + Query query = HalfFloatPoint.newRangeQuery(field, l, u); Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery( field, HalfFloatPoint.halfFloatToSortableShort(l), HalfFloatPoint.halfFloatToSortableShort(u) ); - query = new IndexOrDocValuesQuery(query, dvQuery); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowRangeQuery( + field, + HalfFloatPoint.halfFloatToSortableShort(l), + HalfFloatPoint.halfFloatToSortableShort(u) + ); } - return query; + return HalfFloatPoint.newRangeQuery(field, l, u); } @Override @@ -309,18 +340,39 @@ public Float parse(XContentParser parser, boolean coerce) throws IOException { } @Override - public Query termQuery(String field, Object value) { + public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) { float v = parse(value, false); + if (isSearchable && hasDocValues) { + Query query = FloatPoint.newExactQuery(field, v); + Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery(field, NumericUtils.floatToSortableInt(v)); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowExactQuery(field, NumericUtils.floatToSortableInt(v)); + } return FloatPoint.newExactQuery(field, v); } @Override - public Query termsQuery(String field, List<Object> values) { + public Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable) { float[] v = new float[values.size()]; + long points[] = new long[v.length]; for (int i = 0; i < values.size(); ++i) { v[i] = parse(values.get(i), false); + if (hasDocValues) { + points[i] = NumericUtils.floatToSortableInt(v[i]); + } + } + if (isSearchable && hasDocValues) { + return new IndexOrDocValuesQuery( + FloatPoint.newSetQuery(field, v), + SortedNumericDocValuesField.newSlowSetQuery(field, points) + ); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowSetQuery(field, points); } - return FloatPoint.newSetQuery(field, v); + return FloatField.newSetQuery(field, v); } @Override @@ -331,6 +383,7 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { float l = 
Float.NEGATIVE_INFINITY; @@ -347,16 +400,23 @@ public Query rangeQuery( u = FloatPoint.nextDown(u); } } - Query query = FloatPoint.newRangeQuery(field, l, u); - if (hasDocValues) { + if (isSearchable && hasDocValues) { + Query query = FloatPoint.newRangeQuery(field, l, u); Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery( field, NumericUtils.floatToSortableInt(l), NumericUtils.floatToSortableInt(u) ); - query = new IndexOrDocValuesQuery(query, dvQuery); + return new IndexOrDocValuesQuery(query, dvQuery); } - return query; + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowRangeQuery( + field, + NumericUtils.floatToSortableInt(l), + NumericUtils.floatToSortableInt(u) + ); + } + return FloatPoint.newRangeQuery(field, l, u); } @Override @@ -406,16 +466,37 @@ public Double parse(XContentParser parser, boolean coerce) throws IOException { } @Override - public Query termQuery(String field, Object value) { + public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) { double v = parse(value, false); + if (isSearchable && hasDocValues) { + Query query = DoublePoint.newExactQuery(field, v); + Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery(field, NumericUtils.doubleToSortableLong(v)); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowExactQuery(field, NumericUtils.doubleToSortableLong(v)); + } return DoublePoint.newExactQuery(field, v); } @Override - public Query termsQuery(String field, List<Object> values) { + public Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable) { double[] v = new double[values.size()]; + long points[] = new long[v.length]; for (int i = 0; i < values.size(); ++i) { v[i] = parse(values.get(i), false); + if (hasDocValues) { + points[i] = NumericUtils.doubleToSortableLong(v[i]); + } + } + if (isSearchable && hasDocValues) { + return new IndexOrDocValuesQuery( + DoublePoint.newSetQuery(field, v), + SortedNumericDocValuesField.newSlowSetQuery(field, points) + ); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowSetQuery(field, points); } return DoublePoint.newSetQuery(field, v); } @@ -428,19 +509,27 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { return doubleRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, (l, u) -> { - Query query = DoublePoint.newRangeQuery(field, l, u); - if (hasDocValues) { + if (isSearchable && hasDocValues) { + Query query = DoublePoint.newRangeQuery(field, l, u); Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery( field, NumericUtils.doubleToSortableLong(l), NumericUtils.doubleToSortableLong(u) ); - query = new IndexOrDocValuesQuery(query, dvQuery); + return new IndexOrDocValuesQuery(query, dvQuery); } - return query; + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowRangeQuery( + field, + NumericUtils.doubleToSortableLong(l), + NumericUtils.doubleToSortableLong(u) + ); + } + return DoublePoint.newRangeQuery(field, l, u); }); } @@ -504,13 +593,13 @@ public Short parse(XContentParser parser, boolean coerce) throws IOException { } @Override - public Query termQuery(String field, Object value) { - return INTEGER.termQuery(field, value); + public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) { + return INTEGER.termQuery(field, value, hasDocValues, isSearchable); } 
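+        // Byte and short fields delegate to INTEGER, so they inherit the same
+        // index-versus-doc-values branching without duplicating it.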
@Override - public Query termsQuery(String field, List<Object> values) { - return INTEGER.termsQuery(field, values); + public Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable) { + return INTEGER.termsQuery(field, values, hasDocValues, isSearchable); } @Override @@ -521,9 +610,10 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { - return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper, hasDocValues, context); + return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper, hasDocValues, isSearchable, context); } @Override @@ -571,13 +661,13 @@ public Short parse(XContentParser parser, boolean coerce) throws IOException { } @Override - public Query termQuery(String field, Object value) { - return INTEGER.termQuery(field, value); + public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) { + return INTEGER.termQuery(field, value, hasDocValues, isSearchable); } @Override - public Query termsQuery(String field, List<Object> values) { - return INTEGER.termsQuery(field, values); + public Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable) { + return INTEGER.termsQuery(field, values, hasDocValues, isSearchable); } @Override @@ -588,9 +678,10 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { - return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper, hasDocValues, context); + return INTEGER.rangeQuery(field, lowerTerm, upperTerm, includeLower, includeUpper, hasDocValues, isSearchable, context); } @Override @@ -638,16 +729,24 @@ public Integer parse(XContentParser parser, boolean coerce) throws IOException { } @Override - public Query termQuery(String field, Object value) { + public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) { if (hasDecimalPart(value)) { return Queries.newMatchNoDocsQuery("Value [" + value + "] has a decimal part"); } int v = parse(value, true); + if (isSearchable && hasDocValues) { + Query query = IntPoint.newExactQuery(field, v); + Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery(field, v); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowExactQuery(field, v); + } return IntPoint.newExactQuery(field, v); } @Override - public Query termsQuery(String field, List<Object> values) { + public Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable) { int[] v = new int[values.size()]; int upTo = 0; @@ -664,6 +763,21 @@ public Query termsQuery(String field, List<Object> values) { if (upTo != v.length) { v = Arrays.copyOf(v, upTo); } + long points[] = new long[v.length]; + if (hasDocValues) { + for (int i = 0; i < v.length; i++) { + points[i] = v[i]; + } + } + if (isSearchable && hasDocValues) { + return new IndexOrDocValuesQuery( + IntPoint.newSetQuery(field, v), + SortedNumericDocValuesField.newSlowSetQuery(field, points) + ); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowSetQuery(field, points); + } return IntPoint.newSetQuery(field, v); } @@ -675,6 +789,7 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { int l 
= Integer.MIN_VALUE; @@ -704,15 +819,23 @@ public Query rangeQuery( --u; } } - Query query = IntPoint.newRangeQuery(field, l, u); - if (hasDocValues) { + if (isSearchable && hasDocValues) { + Query query = IntPoint.newRangeQuery(field, l, u); Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(field, l, u); query = new IndexOrDocValuesQuery(query, dvQuery); if (context.indexSortedOnField(field)) { query = new IndexSortSortedNumericDocValuesRangeQuery(field, l, u, query); } + return query; } - return query; + if (hasDocValues) { + Query query = SortedNumericDocValuesField.newSlowRangeQuery(field, l, u); + if (context.indexSortedOnField(field)) { + query = new IndexSortSortedNumericDocValuesRangeQuery(field, l, u, query); + } + return query; + } + return IntPoint.newRangeQuery(field, l, u); } @Override @@ -752,17 +875,28 @@ public Long parse(XContentParser parser, boolean coerce) throws IOException { } @Override - public Query termQuery(String field, Object value) { + public Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable) { if (hasDecimalPart(value)) { return Queries.newMatchNoDocsQuery("Value [" + value + "] has a decimal part"); } long v = parse(value, true); + if (isSearchable && hasDocValues) { + Query query = LongPoint.newExactQuery(field, v); + Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery(field, v); + return new IndexOrDocValuesQuery(query, dvQuery); + + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowExactQuery(field, v); + + } return LongPoint.newExactQuery(field, v); } @Override - public Query termsQuery(String field, List<Object> values) { + public Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable) { long[] v = new long[values.size()]; + int upTo = 0; for (int i = 0; i < values.size(); i++) { @@ -778,6 +912,16 @@ public Query termsQuery(String field, List<Object> values) { if (upTo != v.length) { v = Arrays.copyOf(v, upTo); } + if (isSearchable && hasDocValues) { + return new IndexOrDocValuesQuery( + LongPoint.newSetQuery(field, v), + SortedNumericDocValuesField.newSlowSetQuery(field, v) + ); + } + if (hasDocValues) { + return SortedNumericDocValuesField.newSlowSetQuery(field, v); + + } return LongPoint.newSetQuery(field, v); } @@ -789,18 +933,28 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { return longRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, (l, u) -> { - Query query = LongPoint.newRangeQuery(field, l, u); - if (hasDocValues) { + if (isSearchable && hasDocValues) { + Query query = LongPoint.newRangeQuery(field, l, u); Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(field, l, u); query = new IndexOrDocValuesQuery(query, dvQuery); if (context.indexSortedOnField(field)) { query = new IndexSortSortedNumericDocValuesRangeQuery(field, l, u, query); } + return query; } - return query; + if (hasDocValues) { + Query query = SortedNumericDocValuesField.newSlowRangeQuery(field, l, u); + if (context.indexSortedOnField(field)) { + query = new IndexSortSortedNumericDocValuesRangeQuery(field, l, u, query); + } + return query; + } + return LongPoint.newRangeQuery(field, l, u); + }); } @@ -841,16 +995,24 @@ public BigInteger parse(XContentParser parser, boolean coerce) throws IOExceptio } @Override - public Query termQuery(String field, Object value) { + public Query termQuery(String field, Object value, boolean hasDocValues, boolean 
isSearchable) { if (hasDecimalPart(value)) { return Queries.newMatchNoDocsQuery("Value [" + value + "] has a decimal part"); } BigInteger v = parse(value, true); + if (isSearchable && hasDocValues) { + Query query = BigIntegerPoint.newExactQuery(field, v); + Query dvQuery = SortedUnsignedLongDocValuesSetQuery.newSlowExactQuery(field, v); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocValues) { + return SortedUnsignedLongDocValuesSetQuery.newSlowExactQuery(field, v); + } return BigIntegerPoint.newExactQuery(field, v); } @Override - public Query termsQuery(String field, List<Object> values) { + public Query termsQuery(String field, List<Object> values, boolean hasDocvalues, boolean isSearchable) { BigInteger[] v = new BigInteger[values.size()]; int upTo = 0; @@ -868,6 +1030,14 @@ public Query termsQuery(String field, List<Object> values) { v = Arrays.copyOf(v, upTo); } + if (isSearchable && hasDocvalues) { + Query query = BigIntegerPoint.newSetQuery(field, v); + Query dvQuery = SortedUnsignedLongDocValuesSetQuery.newSlowSetQuery(field, v); + return new IndexOrDocValuesQuery(query, dvQuery); + } + if (hasDocvalues) { + return SortedUnsignedLongDocValuesSetQuery.newSlowSetQuery(field, v); + } return BigIntegerPoint.newSetQuery(field, v); } @@ -879,15 +1049,19 @@ public Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ) { return unsignedLongRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, (l, u) -> { - Query query = BigIntegerPoint.newRangeQuery(field, l, u); - if (hasDocValues) { + if (isSearchable && hasDocValues) { + Query query = BigIntegerPoint.newRangeQuery(field, l, u); Query dvQuery = SortedUnsignedLongDocValuesRangeQuery.newSlowRangeQuery(field, l, u); - query = new IndexOrDocValuesQuery(query, dvQuery); + return new IndexOrDocValuesQuery(query, dvQuery); } - return query; + if (hasDocValues) { + return SortedUnsignedLongDocValuesRangeQuery.newSlowRangeQuery(field, l, u); + } + return BigIntegerPoint.newRangeQuery(field, l, u); }); } @@ -941,9 +1115,9 @@ public final TypeParser parser() { return parser; } - public abstract Query termQuery(String field, Object value); + public abstract Query termQuery(String field, Object value, boolean hasDocValues, boolean isSearchable); - public abstract Query termsQuery(String field, List<Object> values); + public abstract Query termsQuery(String field, List<Object> values, boolean hasDocValues, boolean isSearchable); public abstract Query rangeQuery( String field, @@ -952,6 +1126,7 @@ public abstract Query rangeQuery( boolean includeLower, boolean includeUpper, boolean hasDocValues, + boolean isSearchable, QueryShardContext context ); @@ -1226,8 +1401,8 @@ public NumericType numericType() { @Override public Query termQuery(Object value, QueryShardContext context) { - failIfNotIndexed(); - Query query = type.termQuery(name(), value); + failIfNotIndexedAndNoDocValues(); + Query query = type.termQuery(name(), value, hasDocValues(), isSearchable()); if (boost() != 1f) { query = new BoostQuery(query, boost()); } @@ -1236,8 +1411,8 @@ public Query termQuery(Object value, QueryShardContext context) { @Override public Query termsQuery(List values, QueryShardContext context) { - failIfNotIndexed(); - Query query = type.termsQuery(name(), values); + failIfNotIndexedAndNoDocValues(); + Query query = type.termsQuery(name(), values, hasDocValues(), isSearchable()); if (boost() != 1f) { query = new BoostQuery(query, boost()); } @@ -1246,8 
+1421,17 @@ public Query termsQuery(List values, QueryShardContext context) { @Override public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { - failIfNotIndexed(); - Query query = type.rangeQuery(name(), lowerTerm, upperTerm, includeLower, includeUpper, hasDocValues(), context); + failIfNotIndexedAndNoDocValues(); + Query query = type.rangeQuery( + name(), + lowerTerm, + upperTerm, + includeLower, + includeUpper, + hasDocValues(), + isSearchable(), + context + ); if (boost() != 1f) { query = new BoostQuery(query, boost()); } diff --git a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java index 3c30bb81a9a32..af852b12e7a30 100644 --- a/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/NumberFieldTypeTests.java @@ -66,6 +66,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.index.document.SortedUnsignedLongDocValuesRangeQuery; +import org.opensearch.index.document.SortedUnsignedLongDocValuesSetQuery; import org.opensearch.index.fielddata.IndexNumericFieldData; import org.opensearch.index.mapper.MappedFieldType.Relation; import org.opensearch.index.mapper.NumberFieldMapper.NumberFieldType; @@ -118,15 +119,27 @@ public void testIsFieldWithinQuery() throws IOException { public void testIntegerTermsQueryWithDecimalPart() { MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.INTEGER); - assertEquals(IntPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1, 2.1), null)); - assertEquals(IntPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1.0, 2.1), null)); + assertEquals( + new IndexOrDocValuesQuery(IntPoint.newSetQuery("field", 1), SortedNumericDocValuesField.newSlowSetQuery("field", 1)), + ft.termsQuery(Arrays.asList(1, 2.1), null) + ); + assertEquals( + new IndexOrDocValuesQuery(IntPoint.newSetQuery("field", 1), SortedNumericDocValuesField.newSlowSetQuery("field", 1)), + ft.termsQuery(Arrays.asList(1.0, 2.1), null) + ); assertTrue(ft.termsQuery(Arrays.asList(1.1, 2.1), null) instanceof MatchNoDocsQuery); } public void testLongTermsQueryWithDecimalPart() { MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberType.LONG); - assertEquals(LongPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1, 2.1), null)); - assertEquals(LongPoint.newSetQuery("field", 1), ft.termsQuery(Arrays.asList(1.0, 2.1), null)); + assertEquals( + new IndexOrDocValuesQuery(LongPoint.newSetQuery("field", 1), SortedNumericDocValuesField.newSlowSetQuery("field", 1)), + ft.termsQuery(Arrays.asList(1, 2.1), null) + ); + assertEquals( + new IndexOrDocValuesQuery(LongPoint.newSetQuery("field", 1), SortedNumericDocValuesField.newSlowSetQuery("field", 1)), + ft.termsQuery(Arrays.asList(1.0, 2.1), null) + ); assertTrue(ft.termsQuery(Arrays.asList(1.1, 2.1), null) instanceof MatchNoDocsQuery); } @@ -151,16 +164,18 @@ public void testLongTermQueryWithDecimalPart() { } private static MappedFieldType unsearchable() { - return new NumberFieldType("field", NumberType.LONG, false, false, true, true, null, Collections.emptyMap()); + return new NumberFieldType("field", NumberType.LONG, false, false, false, true, null, Collections.emptyMap()); } public void testTermQuery() { MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", 
NumberFieldMapper.NumberType.LONG); - assertEquals(LongPoint.newExactQuery("field", 42), ft.termQuery("42", null)); + Query dvQuery = SortedNumericDocValuesField.newSlowExactQuery("field", 42); + Query query = new IndexOrDocValuesQuery(LongPoint.newExactQuery("field", 42), dvQuery); + assertEquals(query, ft.termQuery("42", null)); MappedFieldType unsearchable = unsearchable(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("42", null)); - assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage()); } public void testRangeQueryWithNegativeBounds() { @@ -380,7 +395,7 @@ public void testLongRangeQuery() { IllegalArgumentException.class, () -> unsearchable.rangeQuery("1", "3", true, true, null, null, null, MOCK_QSC) ); - assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage()); } public void testUnsignedLongRangeQuery() { @@ -396,7 +411,23 @@ public void testUnsignedLongRangeQuery() { IllegalArgumentException.class, () -> unsearchable.rangeQuery("1", "3", true, true, null, null, null, MOCK_QSC) ); - assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage()); + } + + public void testUnsignedLongTermsQuery() { + MappedFieldType ft = new NumberFieldMapper.NumberFieldType("field", NumberFieldMapper.NumberType.UNSIGNED_LONG); + Query expected = new IndexOrDocValuesQuery( + BigIntegerPoint.newSetQuery("field", BigInteger.valueOf(1), BigInteger.valueOf(3)), + SortedUnsignedLongDocValuesSetQuery.newSlowSetQuery("field", BigInteger.valueOf(1), BigInteger.valueOf(3)) + ); + assertEquals(expected, ft.termsQuery(List.of("1", "3"), MOCK_QSC)); + + MappedFieldType unsearchable = unsearchable(); + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> unsearchable.termsQuery(List.of("1", "3"), MOCK_QSC) + ); + assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage()); } public void testDoubleRangeQuery() { @@ -416,7 +447,7 @@ public void testDoubleRangeQuery() { IllegalArgumentException.class, () -> unsearchable.rangeQuery("1", "3", true, true, null, null, null, MOCK_QSC) ); - assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage()); + assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage()); } public void testConversions() { @@ -518,8 +549,8 @@ public void testHalfFloatRange() throws IOException { float u = (randomFloat() * 2 - 1) * 65504; boolean includeLower = randomBoolean(); boolean includeUpper = randomBoolean(); - Query floatQ = NumberType.FLOAT.rangeQuery("float", l, u, includeLower, includeUpper, false, MOCK_QSC); - Query halfFloatQ = NumberType.HALF_FLOAT.rangeQuery("half_float", l, u, includeLower, includeUpper, false, MOCK_QSC); + Query floatQ = NumberType.FLOAT.rangeQuery("float", l, u, includeLower, includeUpper, false, true, MOCK_QSC); + Query halfFloatQ = NumberType.HALF_FLOAT.rangeQuery("half_float", l, u, includeLower, includeUpper, false, true, 
MOCK_QSC); assertEquals(searcher.count(floatQ), searcher.count(halfFloatQ)); } IOUtils.close(reader, dir); @@ -549,8 +580,17 @@ public void testUnsignedLongRange() throws IOException { BigInteger u = randomUnsignedLong(); boolean includeLower = randomBoolean(); boolean includeUpper = randomBoolean(); - Query unsignedLongQ = NumberType.UNSIGNED_LONG.rangeQuery("unsigned_long", l, u, includeLower, includeUpper, false, MOCK_QSC); - Query doubleQ = NumberType.DOUBLE.rangeQuery("double", l, u, includeLower, includeUpper, false, MOCK_QSC); + Query unsignedLongQ = NumberType.UNSIGNED_LONG.rangeQuery( + "unsigned_long", + l, + u, + includeLower, + includeUpper, + false, + true, + MOCK_QSC + ); + Query doubleQ = NumberType.DOUBLE.rangeQuery("double", l, u, includeLower, includeUpper, false, true, MOCK_QSC); assertEquals(searcher.count(doubleQ), searcher.count(unsignedLongQ)); } IOUtils.close(reader, dir); @@ -558,21 +598,23 @@ public void testUnsignedLongRange() throws IOException { public void testNegativeZero() { assertEquals( - NumberType.DOUBLE.rangeQuery("field", null, -0d, true, true, false, MOCK_QSC), - NumberType.DOUBLE.rangeQuery("field", null, +0d, true, false, false, MOCK_QSC) + NumberType.DOUBLE.rangeQuery("field", null, -0d, true, true, false, true, MOCK_QSC), + NumberType.DOUBLE.rangeQuery("field", null, +0d, true, false, false, true, MOCK_QSC) ); assertEquals( - NumberType.FLOAT.rangeQuery("field", null, -0f, true, true, false, MOCK_QSC), - NumberType.FLOAT.rangeQuery("field", null, +0f, true, false, false, MOCK_QSC) + NumberType.FLOAT.rangeQuery("field", null, -0f, true, true, false, true, MOCK_QSC), + NumberType.FLOAT.rangeQuery("field", null, +0f, true, false, false, true, MOCK_QSC) ); assertEquals( - NumberType.HALF_FLOAT.rangeQuery("field", null, -0f, true, true, false, MOCK_QSC), - NumberType.HALF_FLOAT.rangeQuery("field", null, +0f, true, false, false, MOCK_QSC) + NumberType.HALF_FLOAT.rangeQuery("field", null, -0f, true, true, false, true, MOCK_QSC), + NumberType.HALF_FLOAT.rangeQuery("field", null, +0f, true, false, false, true, MOCK_QSC) ); - assertFalse(NumberType.DOUBLE.termQuery("field", -0d).equals(NumberType.DOUBLE.termQuery("field", +0d))); - assertFalse(NumberType.FLOAT.termQuery("field", -0f).equals(NumberType.FLOAT.termQuery("field", +0f))); - assertFalse(NumberType.HALF_FLOAT.termQuery("field", -0f).equals(NumberType.HALF_FLOAT.termQuery("field", +0f))); + assertFalse(NumberType.DOUBLE.termQuery("field", -0d, true, true).equals(NumberType.DOUBLE.termQuery("field", +0d, true, true))); + assertFalse(NumberType.FLOAT.termQuery("field", -0f, true, true).equals(NumberType.FLOAT.termQuery("field", +0f, true, true))); + assertFalse( + NumberType.HALF_FLOAT.termQuery("field", -0f, true, true).equals(NumberType.HALF_FLOAT.termQuery("field", +0f, true, true)) + ); } // Make sure we construct the IndexOrDocValuesQuery objects with queries that match @@ -628,6 +670,7 @@ public void doTestDocValueRangeQueries(NumberType type, Supplier<Number> valueSu randomBoolean(), randomBoolean(), true, + true, MOCK_QSC ); assertThat(query, instanceOf(IndexOrDocValuesQuery.class)); @@ -708,6 +751,7 @@ public void doTestIndexSortRangeQueries(NumberType type, Supplier<Number> valueS randomBoolean(), randomBoolean(), true, + true, context ); assertThat(query, instanceOf(IndexSortSortedNumericDocValuesRangeQuery.class)); diff --git a/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java index 
3ac9bce840a20..c5bdf9b586df1 100644 --- a/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java @@ -36,6 +36,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.AutomatonQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; @@ -112,6 +113,7 @@ protected void doAssertLuceneQuery(TermQueryBuilder queryBuilder, Query query, Q either(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)) .or(instanceOf(MatchNoDocsQuery.class)) .or(instanceOf(AutomatonQuery.class)) + .or(instanceOf(IndexOrDocValuesQuery.class)) ); MappedFieldType mapper = context.fieldMapper(queryBuilder.fieldName()); if (query instanceof TermQuery) { diff --git a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java index b587bec2d5343..32bf290627b63 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.PointInSetQuery; import org.apache.lucene.search.Query; @@ -135,6 +136,7 @@ protected void doAssertLuceneQuery(TermsQueryBuilder queryBuilder, Query query, either(instanceOf(TermInSetQuery.class)).or(instanceOf(PointInSetQuery.class)) .or(instanceOf(ConstantScoreQuery.class)) .or(instanceOf(MatchNoDocsQuery.class)) + .or(instanceOf(IndexOrDocValuesQuery.class)) ); if (query instanceof ConstantScoreQuery) { assertThat(((ConstantScoreQuery) query).getQuery(), instanceOf(BooleanQuery.class)); From 808ed56bcbc6ed547f64895cd02283df07fb5c48 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Thu, 4 Jan 2024 15:14:29 -0500 Subject: [PATCH 051/178] Add the means to extract the contextual underlying channel from HttpChannel without excessive typecasting (#11751) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- CHANGELOG.md | 2 +- .../http/netty4/Netty4HttpChannel.java | 5 ++ .../http/netty4/Netty4HttpChannelTests.java | 52 +++++++++++++++++++ 3 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpChannelTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index ac15176af56f5..51276133af7d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -162,7 +162,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) - Force merge with `only_expunge_deletes` honors max segment size ([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036)) -- Add the means to extract the contextual properties from HttpChannel, TcpCChannel and TrasportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562)) +- Add the means to extract the contextual properties from HttpChannel, TcpChannel and TransportChannel without excessive typecasting 
([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562)), ([#11751](https://github.com/opensearch-project/OpenSearch/pull/11751)) - Introduce new dynamic cluster setting to control slice computation for concurrent segment search ([#9107](https://github.com/opensearch-project/OpenSearch/pull/9107)) - Search pipelines now support asynchronous request and response processors to avoid blocking on a transport thread ([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598)) - [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524)) diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java index 6475a0b744c60..75d30aa9797c0 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpChannel.java @@ -46,6 +46,7 @@ import io.netty.channel.ChannelPipeline; public class Netty4HttpChannel implements HttpChannel { + private static final String CHANNEL_PROPERTY = "channel"; private final Channel channel; private final CompletableContext<Void> closeContext = new CompletableContext<>(); @@ -102,6 +103,10 @@ public Channel getNettyChannel() { @SuppressWarnings("unchecked") @Override public <T> Optional<T> get(String name, Class<T> clazz) { + if (CHANNEL_PROPERTY.equalsIgnoreCase(name) && clazz.isAssignableFrom(Channel.class)) { + return (Optional<T>) Optional.of(getNettyChannel()); + } + Object handler = getNettyChannel().pipeline().get(name); if (handler == null && inboundPipeline() != null) { diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpChannelTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpChannelTests.java new file mode 100644 index 0000000000000..c49166a51c24a --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpChannelTests.java @@ -0,0 +1,52 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.http.netty4; + +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.transport.Netty4NioSocketChannel; +import org.junit.Before; + +import java.util.Optional; + +import io.netty.channel.Channel; +import io.netty.channel.ChannelOutboundInvoker; +import io.netty.channel.ServerChannel; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.sameInstance; + +public class Netty4HttpChannelTests extends OpenSearchTestCase { + private Netty4HttpChannel netty4HttpChannel; + private Channel channel; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + channel = new Netty4NioSocketChannel(); + netty4HttpChannel = new Netty4HttpChannel(channel); + } + + public void testChannelAttributeMatchesChannel() { + final Optional<Channel> channelOpt = netty4HttpChannel.get("channel", Channel.class); + assertThat(channelOpt.isPresent(), is(true)); + assertThat(channelOpt.get(), sameInstance(channel)); + } + + public void testChannelAttributeMatchesChannelOutboundInvoker() { + final Optional<ChannelOutboundInvoker> channelOpt = netty4HttpChannel.get("channel", ChannelOutboundInvoker.class); + assertThat(channelOpt.isPresent(), is(true)); + assertThat(channelOpt.get(), sameInstance(channel)); + } + + public void testChannelAttributeIsEmpty() { + final Optional<ServerChannel> channelOpt = netty4HttpChannel.get("channel", ServerChannel.class); + assertThat(channelOpt.isEmpty(), is(true)); + } +} From 22b628bacaab56c145538898ca190ffce9778c6e Mon Sep 17 00:00:00 2001 From: Michael Froh <froh@amazon.com> Date: Thu, 4 Jan 2024 21:13:28 +0000 Subject: [PATCH 052/178] Fix parsing of flat object fields with dots in keys (#11425) We have a bug where a flat object field with inner fields that contain dots will "push" the dotted name onto the dot-path from the root, but then would just "pop" off the last part of the dotted name. This change adds more robust support for flat object keys and subkeys that contain dots (i.e. it pops off the entirety of the latest key, regardless of how many dots it contains). 
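To make the before/after concrete, here is the example exercised by the new JsonToStringXContentParserTests added below (all field names come from those tests, not from any real mapping): a flat_object field named "flat" holding { "first": "1", "second.inner": { "really_inner": "2.0" }, "third": "three" } now flattens to the keys [first, second, inner, really_inner, third], the values [1, 2.0, three], and paths such as flat.second.inner.really_inner=2.0. Before this change, popping the dotted key "second.inner" removed only the ".inner" suffix from the dot-path, so the stale ".second" prefix corrupted the paths of later fields (e.g. "third" would have been recorded as flat.second.third instead of flat.third).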
Fixes https://github.com/opensearch-project/OpenSearch/issues/11402 Signed-off-by: Michael Froh <froh@amazon.com> --- CHANGELOG.md | 3 +- .../xcontent/JsonToStringXContentParser.java | 31 +++-- .../index/mapper/FlatObjectFieldMapper.java | 2 +- .../JsonToStringXContentParserTests.java | 113 ++++++++++++++++++ 4 files changed, 137 insertions(+), 12 deletions(-) create mode 100644 server/src/test/java/org/opensearch/common/xcontent/JsonToStringXContentParserTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 51276133af7d2..0958dd41d5a84 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -191,7 +191,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Fixed - Fix failure in dissect ingest processor parsing empty brackets ([#9225](https://github.com/opensearch-project/OpenSearch/pull/9255)) -- Fix class_cast_exception when passing int to _version and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101)) +- Fix `class_cast_exception` when passing int to `_version` and other metadata fields in ingest simulate API ([#10101](https://github.com/opensearch-project/OpenSearch/pull/10101)) - Fix Segment Replication ShardLockObtainFailedException bug during index corruption ([10370](https://github.com/opensearch-project/OpenSearch/pull/10370)) - Fix some test methods in SimulatePipelineRequestParsingTests never run and fix test failure ([#10496](https://github.com/opensearch-project/OpenSearch/pull/10496)) - Fix passing wrong parameter when calling newConfigurationException() in DotExpanderProcessor ([#10737](https://github.com/opensearch-project/OpenSearch/pull/10737)) @@ -203,6 +203,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix the issue with DefaultSpanScope restoring wrong span in the TracerContextStorage upon detach ([#11316](https://github.com/opensearch-project/OpenSearch/issues/11316)) - Remove shadowJar from `lang-painless` module publication ([#11369](https://github.com/opensearch-project/OpenSearch/issues/11369)) - Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167)) +- Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425)) - Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238)) - Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) diff --git a/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java b/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java index 9e81d8a7af078..9b2bd06a88e2e 100644 --- a/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java +++ b/server/src/main/java/org/opensearch/common/xcontent/JsonToStringXContentParser.java @@ -18,7 +18,6 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentLocation; import org.opensearch.core.xcontent.XContentParser; -import org.opensearch.index.mapper.ParseContext; import java.io.IOException; import java.math.BigInteger; @@ -40,7 +39,6 @@ public class JsonToStringXContentParser extends AbstractXContentParser { private 
ArrayList<String> keyList = new ArrayList<>(); private XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent); - private ParseContext parseContext; private NamedXContentRegistry xContentRegistry; @@ -54,14 +52,13 @@ public class JsonToStringXContentParser extends AbstractXContentParser { public JsonToStringXContentParser( NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, - ParseContext parseContext, + XContentParser parser, String fieldTypeName ) throws IOException { super(xContentRegistry, deprecationHandler); - this.parseContext = parseContext; this.deprecationHandler = deprecationHandler; this.xContentRegistry = xContentRegistry; - this.parser = parseContext.parser(); + this.parser = parser; this.fieldTypeName = fieldTypeName; } @@ -86,8 +83,22 @@ private void parseToken(StringBuilder path, String currentFieldName) throws IOEx StringBuilder parsedFields = new StringBuilder(); if (this.parser.currentToken() == Token.FIELD_NAME) { - path.append(DOT_SYMBOL + currentFieldName); - this.keyList.add(currentFieldName); + path.append(DOT_SYMBOL).append(currentFieldName); + int dotIndex = currentFieldName.indexOf(DOT_SYMBOL); + String fieldNameSuffix = currentFieldName; + // The field name may be of the form foo.bar.baz + // If that's the case, each "part" is a key. + while (dotIndex >= 0) { + String fieldNamePrefix = fieldNameSuffix.substring(0, dotIndex); + if (!fieldNamePrefix.isEmpty()) { + this.keyList.add(fieldNamePrefix); + } + fieldNameSuffix = fieldNameSuffix.substring(dotIndex + 1); + dotIndex = fieldNameSuffix.indexOf(DOT_SYMBOL); + } + if (!fieldNameSuffix.isEmpty()) { + this.keyList.add(fieldNameSuffix); + } } else if (this.parser.currentToken() == Token.START_ARRAY) { parseToken(path, currentFieldName); break; @@ -97,18 +108,18 @@ private void parseToken(StringBuilder path, String currentFieldName) throws IOEx parseToken(path, currentFieldName); int dotIndex = path.lastIndexOf(DOT_SYMBOL); if (dotIndex != -1) { - path.delete(dotIndex, path.length()); + path.setLength(path.length() - currentFieldName.length() - 1); } } else { if (!path.toString().contains(currentFieldName)) { - path.append(DOT_SYMBOL + currentFieldName); + path.append(DOT_SYMBOL).append(currentFieldName); } parseValue(parsedFields); this.valueList.add(parsedFields.toString()); this.valueAndPathList.add(path + EQUAL_SYMBOL + parsedFields); int dotIndex = path.lastIndexOf(DOT_SYMBOL); if (dotIndex != -1) { - path.delete(dotIndex, path.length()); + path.setLength(path.length() - currentFieldName.length() - 1); } } diff --git a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java index 00b623dddac23..9a3f2595a7c9e 100644 --- a/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/FlatObjectFieldMapper.java @@ -572,7 +572,7 @@ protected void parseCreateField(ParseContext context) throws IOException { JsonToStringXContentParser JsonToStringParser = new JsonToStringXContentParser( NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, - context, + context.parser(), fieldType().name() ); /* diff --git a/server/src/test/java/org/opensearch/common/xcontent/JsonToStringXContentParserTests.java b/server/src/test/java/org/opensearch/common/xcontent/JsonToStringXContentParserTests.java new file mode 100644 index 0000000000000..0feb7bcd1ceec --- /dev/null +++ 
b/server/src/test/java/org/opensearch/common/xcontent/JsonToStringXContentParserTests.java @@ -0,0 +1,113 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.xcontent; + +import org.opensearch.common.xcontent.json.JsonXContent; +import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; + +public class JsonToStringXContentParserTests extends OpenSearchTestCase { + + private String flattenJsonString(String fieldName, String in) throws IOException { + String transformed; + try ( + XContentParser parser = JsonXContent.jsonXContent.createParser( + xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + in + ) + ) { + JsonToStringXContentParser jsonToStringXContentParser = new JsonToStringXContentParser( + xContentRegistry(), + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + parser, + fieldName + ); + // Skip the START_OBJECT token: + jsonToStringXContentParser.nextToken(); + + XContentParser transformedParser = jsonToStringXContentParser.parseObject(); + try (XContentBuilder jsonBuilder = XContentFactory.jsonBuilder()) { + jsonBuilder.copyCurrentStructure(transformedParser); + return jsonBuilder.toString(); + } + } + } + + public void testNestedObjects() throws IOException { + String jsonExample = "{" + "\"first\" : \"1\"," + "\"second\" : {" + " \"inner\": \"2.0\"" + "}," + "\"third\": \"three\"" + "}"; + + assertEquals( + "{" + + "\"flat\":[\"first\",\"second\",\"inner\",\"third\"]," + + "\"flat._value\":[\"1\",\"2.0\",\"three\"]," + + "\"flat._valueAndPath\":[\"flat.first=1\",\"flat.second.inner=2.0\",\"flat.third=three\"]" + + "}", + flattenJsonString("flat", jsonExample) + ); + } + + public void testChildHasDots() throws IOException { + // This should be exactly the same as testNestedObjects. We're just using the "flat" notation for the inner + // object. 
+ String jsonExample = "{" + "\"first\" : \"1\"," + "\"second.inner\" : \"2.0\"," + "\"third\": \"three\"" + "}"; + + assertEquals( + "{" + + "\"flat\":[\"first\",\"second\",\"inner\",\"third\"]," + + "\"flat._value\":[\"1\",\"2.0\",\"three\"]," + + "\"flat._valueAndPath\":[\"flat.first=1\",\"flat.second.inner=2.0\",\"flat.third=three\"]" + + "}", + flattenJsonString("flat", jsonExample) + ); + } + + public void testNestChildObjectWithDots() throws IOException { + String jsonExample = "{" + + "\"first\" : \"1\"," + + "\"second.inner\" : {" + + " \"really_inner\" : \"2.0\"" + + "}," + + "\"third\": \"three\"" + + "}"; + + assertEquals( + "{" + + "\"flat\":[\"first\",\"second\",\"inner\",\"really_inner\",\"third\"]," + + "\"flat._value\":[\"1\",\"2.0\",\"three\"]," + + "\"flat._valueAndPath\":[\"flat.first=1\",\"flat.second.inner.really_inner=2.0\",\"flat.third=three\"]" + + "}", + flattenJsonString("flat", jsonExample) + ); + } + + public void testNestChildObjectWithDotsAndFieldWithDots() throws IOException { + String jsonExample = "{" + + "\"first\" : \"1\"," + + "\"second.inner\" : {" + + " \"totally.absolutely.inner\" : \"2.0\"" + + "}," + + "\"third\": \"three\"" + + "}"; + + assertEquals( + "{" + + "\"flat\":[\"first\",\"second\",\"inner\",\"totally\",\"absolutely\",\"inner\",\"third\"]," + + "\"flat._value\":[\"1\",\"2.0\",\"three\"]," + + "\"flat._valueAndPath\":[\"flat.first=1\",\"flat.second.inner.totally.absolutely.inner=2.0\",\"flat.third=three\"]" + + "}", + flattenJsonString("flat", jsonExample) + ); + } + +} From 2860805e894c49a04ebf72121d96a24d353b6194 Mon Sep 17 00:00:00 2001 From: Rishabh Maurya <rishabhmaurya05@gmail.com> Date: Thu, 4 Jan 2024 13:17:25 -0800 Subject: [PATCH 053/178] Update the skip version for match_only_text field Integ tests to 2.11.99 (#11752) * update the skip version for integ tests to 2.11.99 Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> * fix the version in skip section reason Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> --------- Signed-off-by: Rishabh Maurya <rishabhmaurya05@gmail.com> --- .../11_match_field_match_only_text.yml | 4 ++-- .../20_ngram_search_field_match_only_text.yml | 8 ++++---- ..._ngram_highligthing_field_match_only_text.yml | 4 ++-- .../40_query_string_field_match_only_text.yml | 4 ++-- ...th_default_analyzer_field_match_only_text.yml | 4 ++-- ...eries_with_synonyms_field_match_only_text.yml | 8 ++++---- .../60_synonym_graph_field_match_only_text.yml | 4 ++-- .../70_intervals_field_match_only_text.yml | 8 ++++---- .../20_phrase_field_match_only_text.yml | 16 ++++++++-------- .../20_highlighting_field_match_only_text.yml | 4 ++-- .../20_query_string_field_match_only_text.yml | 4 ++-- .../30_sig_terms_field_match_only_text.yml | 4 ++-- .../90_sig_text_field_match_only_text.yml | 8 ++++---- .../20_highlighting_field_match_only_text.yml | 4 ++-- .../search/160_exists_query_match_only_text.yml | 4 ++-- .../200_phrase_search_field_match_only_text.yml | 4 ++-- ...0_match_bool_prefix_field_match_only_text.yml | 4 ++-- ...20_disallow_queries_field_match_only_text.yml | 4 ++-- .../10_basic_field_match_only_field.yml | 4 ++-- 19 files changed, 52 insertions(+), 52 deletions(-) diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml index 40ff2c2f4cdbe..140d70414a4a7 100644 --- 
a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/11_match_field_match_only_text.yml @@ -2,8 +2,8 @@ "match query with stacked stems": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" # Tests the match query stemmed tokens are "stacked" on top of the unstemmed # versions in the same position. - do: diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml index 95b648dee47c8..a5da3043f19b5 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search_field_match_only_text.yml @@ -1,7 +1,7 @@ "ngram search": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test @@ -45,8 +45,8 @@ --- "testNGramCopyField": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml index 597f55679a2c6..accf5d975d57f 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing_field_match_only_text.yml @@ -1,7 +1,7 @@ "ngram highlighting": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml index ddebb1d76acbc..717d3a7dd8a3e 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string_field_match_only_text.yml @@ -1,8 +1,8 @@ --- "Test query string with snowball": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml index 97f3fb65e94a2..cd2d2e42c6a17 100644 --- 
a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer_field_match_only_text.yml @@ -1,8 +1,8 @@ --- "Test default search analyzer is applied": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml index 0c263a47a38e6..0c537dd42d583 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms_field_match_only_text.yml @@ -1,8 +1,8 @@ --- "Test common terms query with stacked tokens": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" features: "allowed_warnings" - do: @@ -247,8 +247,8 @@ --- "Test match query with synonyms - see #3881 for extensive description of the issue": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml index 91a8b1509517e..d3f5d0fe4f8b4 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph_field_match_only_text.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml index 9792c9d2695ea..8334ca27ff274 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals_field_match_only_text.yml @@ -1,8 +1,8 @@ # integration tests for intervals queries using analyzers setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test @@ -26,8 +26,8 @@ setup: --- "Test use_field": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: catch: bad_request search: diff --git 
a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml index aff2b3f11101c..90596ca04205c 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/20_phrase_field_match_only_text.yml @@ -2,8 +2,8 @@ setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test @@ -122,8 +122,8 @@ setup: --- "breaks ties by sorting terms": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" # This runs the suggester without bigrams so we can be sure of the sort order - do: search: @@ -181,8 +181,8 @@ setup: --- "doesn't fail when asked to run on a field without unigrams when force_unigrams=false": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: search: rest_total_hits_as_int: true @@ -213,8 +213,8 @@ setup: --- "reverse suggestions": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: search: rest_total_hits_as_int: true diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml index 3cb8e09c70aed..1d6a938675e39 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting_field_match_only_text.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml index 085c5633ac72b..044ae5dd6a94d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.validate_query/20_query_string_field_match_only_text.yml @@ -1,8 +1,8 @@ --- "validate_query with query_string parameters": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml index 7a96536a2e261..d1cc6c8295bd9 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/30_sig_terms_field_match_only_text.yml @@ -1,8 +1,8 @@ --- "Default index": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: goodbad diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml index bc41f157dfdc4..e21c4fb946d85 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/90_sig_text_field_match_only_text.yml @@ -1,8 +1,8 @@ --- "Default index": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: goodbad @@ -78,8 +78,8 @@ --- "Dedup noise": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: goodbad diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml index 7100d620bf19e..9e60d69bfedd7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/20_highlighting_field_match_only_text.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: index: test diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml index 03626236604a1..69c639a8f506a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query_match_only_text.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" features: ["headers"] - do: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml index a41b8d353e3e9..13fd6b3858948 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/200_phrase_search_field_match_only_text.yml @@ -1,8 +1,8 @@ --- "search with indexed phrases": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml index fc4e9f9de0f38..682a7dded1e9b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/310_match_bool_prefix_field_match_only_text.yml @@ -1,7 +1,7 @@ setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml index f4faf87eb83cc..00e54e43d6f04 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries_field_match_only_text.yml @@ -1,8 +1,8 @@ --- setup: - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml index cc15796e4697f..44adb48c8765e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic_field_match_only_field.yml @@ -1,8 +1,8 @@ --- "Search shards aliases with and without filters": - skip: - version: " - 2.99.99" - reason: "match_only_text was added in 3.0" + version: " - 2.11.99" + reason: "match_only_text was added in 2.12" - do: indices.create: From 714fa73c4bc4c0cbaa624629c724e57be8405209 Mon Sep 17 00:00:00 2001 From: Poojita Raj <poojiraj@amazon.com> Date: Thu, 4 Jan 2024 15:13:10 -0800 Subject: [PATCH 054/178] Add logging for tests in RemoteStoreStatsIT to catch assertion failure cause (#11734) * Add logging for tests in RemoteStoreStatsIT to catch assertion failure cause Signed-off-by: Poojita Raj <poojiraj@amazon.com> * Add test logging annotation for trace logs Signed-off-by: Poojita Raj <poojiraj@amazon.com> --------- Signed-off-by: Poojita Raj <poojiraj@amazon.com> --- .../remotestore/RemoteStoreStatsIT.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java index 2d3ab135d0377..4a0af206b9d89 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreStatsIT.java @@ -29,6 +29,7 @@ import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.disruption.NetworkDisruption; +import org.opensearch.test.junit.annotations.TestLogging; import org.opensearch.test.transport.MockTransportService; import java.io.IOException; @@ -249,6 +250,7 @@ public void testStatsResponseFromLocalNode() { } } + @TestLogging(reason 
= "Getting trace logs from remote store package", value = "org.opensearch.remotestore:TRACE") public void testDownloadStatsCorrectnessSinglePrimarySingleReplica() throws Exception { setup(); // Scenario: @@ -277,6 +279,13 @@ public void testDownloadStatsCorrectnessSinglePrimarySingleReplica() throws Exce .collect(Collectors.toList()) .get(0) .getSegmentStats(); + logger.info( + "Zero state primary stats: {}ms refresh time lag, {}b bytes lag, {}b upload bytes started and {}b upload bytes failed.", + zeroStatePrimaryStats.refreshTimeLagMs, + zeroStatePrimaryStats.bytesLag, + zeroStatePrimaryStats.uploadBytesStarted, + zeroStatePrimaryStats.uploadBytesFailed + ); assertTrue( zeroStatePrimaryStats.totalUploadsStarted == zeroStatePrimaryStats.totalUploadsSucceeded && zeroStatePrimaryStats.totalUploadsSucceeded == 1 @@ -339,6 +348,7 @@ public void testDownloadStatsCorrectnessSinglePrimarySingleReplica() throws Exce } } + @TestLogging(reason = "Getting trace logs from remote store package", value = "org.opensearch.remotestore:TRACE") public void testDownloadStatsCorrectnessSinglePrimaryMultipleReplicaShards() throws Exception { setup(); // Scenario: @@ -371,6 +381,13 @@ public void testDownloadStatsCorrectnessSinglePrimaryMultipleReplicaShards() thr .collect(Collectors.toList()) .get(0) .getSegmentStats(); + logger.info( + "Zero state primary stats: {}ms refresh time lag, {}b bytes lag, {}b upload bytes started and {}b upload bytes failed.", + zeroStatePrimaryStats.refreshTimeLagMs, + zeroStatePrimaryStats.bytesLag, + zeroStatePrimaryStats.uploadBytesStarted, + zeroStatePrimaryStats.uploadBytesFailed + ); assertTrue( zeroStatePrimaryStats.totalUploadsStarted == zeroStatePrimaryStats.totalUploadsSucceeded && zeroStatePrimaryStats.totalUploadsSucceeded == 1 From 3a3da4fe353500e6d5e46785a594db33280fb56c Mon Sep 17 00:00:00 2001 From: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> Date: Thu, 4 Jan 2024 16:25:36 -0800 Subject: [PATCH 055/178] [Segment Replication] [Remote Store] Replace overriding mockInternalEngine() in test classes with NRTReplicationEngine. (#11716) * Replace overriding mockInternalEngine() in test classes with NRTReplicationEngine. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * remove unused comment. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Add comment for explaining the conditional logic. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Update comment with exact reason for conditional logic. 
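For context, the net effect of the MockEngineFactory change below is that newReadWriteEngine(EngineConfig config) now returns config.isReadOnlyReplica() ? new NRTReplicationEngine(config) : new MockInternalEngine(config, wrapper). Read-only (segment replication) replicas therefore get a real NRTReplicationEngine while primary shards keep the mock engine, which is why the per-class addMockInternalEngine() overrides that returned false can simply be deleted.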
Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> --------- Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> --- .../indices/replication/SegmentReplicationBaseIT.java | 5 ----- .../replication/SegmentReplicationClusterSettingIT.java | 5 ----- .../remotestore/RemoteIndexPrimaryRelocationIT.java | 5 ----- .../remotestore/RemoteStoreBaseIntegTestCase.java | 5 ----- .../SegmentReplicationUsingRemoteStoreDisruptionIT.java | 5 ----- .../snapshots/SegmentReplicationSnapshotIT.java | 5 ----- .../org/opensearch/test/engine/MockEngineFactory.java | 9 ++++++++- 7 files changed, 8 insertions(+), 31 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java index 1d93eecd6b245..641f714d33414 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationBaseIT.java @@ -60,11 +60,6 @@ protected Collection<Class<? extends Plugin>> nodePlugins() { return asList(MockTransportService.TestPlugin.class); } - @Override - protected boolean addMockInternalEngine() { - return false; - } - @Override public Settings indexSettings() { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java index c4e8ccfc0ecec..f2cb7c9c6bfc8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationClusterSettingIT.java @@ -52,11 +52,6 @@ public Settings indexSettings() { .build(); } - @Override - protected boolean addMockInternalEngine() { - return false; - } - public void testIndexReplicationSettingOverridesSegRepClusterSetting() throws Exception { Settings settings = Settings.builder().put(CLUSTER_SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT).build(); final String ANOTHER_INDEX = "test-index"; diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java index d8b7718a55377..67316ed0e6e6b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexPrimaryRelocationIT.java @@ -35,11 +35,6 @@ protected Settings nodeSettings(int nodeOrdinal) { .build(); } - @Override - protected boolean addMockInternalEngine() { - return false; - } - public Settings indexSettings() { return Settings.builder() .put(super.indexSettings()) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java index 8c15ebd0505d9..d23e634bb3368 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java @@ -121,11 +121,6 @@ protected Map<String, Long> indexData(int numberOfIterations, boolean 
invokeFlus return indexingStats; } - @Override - protected boolean addMockInternalEngine() { - return false; - } - @Override protected Settings nodeSettings(int nodeOrdinal) { if (segmentRepoPath == null || translogRepoPath == null) { diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java index d5cdc22a15478..8372135fc55c4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java @@ -44,11 +44,6 @@ public Settings indexSettings() { return remoteStoreIndexSettings(1); } - @Override - protected boolean addMockInternalEngine() { - return false; - } - public void testCancelReplicationWhileSyncingSegments() throws Exception { Path location = randomRepoPath().toAbsolutePath(); setup(location, 0d, "metadata", Long.MAX_VALUE, 1); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java index 2c12c0abb202b..c649c4ab13e7e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SegmentReplicationSnapshotIT.java @@ -74,11 +74,6 @@ public Settings restoreIndexDocRepSettings() { return Settings.builder().put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.DOCUMENT).build(); } - @Override - protected boolean addMockInternalEngine() { - return false; - } - public void ingestData(int docCount, String indexName) throws Exception { for (int i = 0; i < docCount; i++) { client().prepareIndex(indexName).setId(Integer.toString(i)).setSource("field", "value" + i).execute().actionGet(); diff --git a/test/framework/src/main/java/org/opensearch/test/engine/MockEngineFactory.java b/test/framework/src/main/java/org/opensearch/test/engine/MockEngineFactory.java index 30cc48c588be1..102c641746b01 100644 --- a/test/framework/src/main/java/org/opensearch/test/engine/MockEngineFactory.java +++ b/test/framework/src/main/java/org/opensearch/test/engine/MockEngineFactory.java @@ -35,6 +35,7 @@ import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.EngineConfig; import org.opensearch.index.engine.EngineFactory; +import org.opensearch.index.engine.NRTReplicationEngine; public final class MockEngineFactory implements EngineFactory { @@ -46,6 +47,12 @@ public MockEngineFactory(Class<? extends FilterDirectoryReader> wrapper) { @Override public Engine newReadWriteEngine(EngineConfig config) { - return new MockInternalEngine(config, wrapper); + + /** + * Segment replication enabled replicas (i.e. read only replicas) do not use an InternalEngine so a MockInternalEngine + * will not work and an NRTReplicationEngine must be used instead. The primary shards for these indexes will + * still use a MockInternalEngine. + */ + return config.isReadOnlyReplica() ? 
new NRTReplicationEngine(config) : new MockInternalEngine(config, wrapper); } } From ab0f70eef70d9c75eabc4abfc8e7f1073d739ced Mon Sep 17 00:00:00 2001 From: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> Date: Fri, 5 Jan 2024 09:20:56 -0800 Subject: [PATCH 056/178] Introduce a new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality (#11717) * Introduce a new feature flag to gate the writeable remote index functionality. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Add changelog entry. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Update changelog entry. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> --------- Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> --- CHANGELOG.md | 1 + .../common/settings/FeatureFlagSettings.java | 3 ++- .../org/opensearch/common/util/FeatureFlags.java | 12 ++++++++++++ 3 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0958dd41d5a84..f7aa083eb2bcb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -120,6 +120,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) - Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) - Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) +- Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11717)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java index 387b0c9753574..d3285c379bcc4 100644 --- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java @@ -40,7 +40,8 @@ protected FeatureFlagSettings( FeatureFlags.IDENTITY_SETTING, FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING, FeatureFlags.TELEMETRY_SETTING, - FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING + FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING, + FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING ) ) ); diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java index 4e9b417e3433b..c54772caa574b 100644 --- a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java +++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java @@ -60,6 +60,12 @@ public class FeatureFlags { */ public static final String DATETIME_FORMATTER_CACHING = "opensearch.experimental.optimization.datetime_formatter_caching.enabled"; + /** + * Gates the functionality of writeable remote index + * Once the feature is ready for release, this feature flag can be removed. 
+ */ + public static final String WRITEABLE_REMOTE_INDEX = "opensearch.experimental.feature.writeable_remote_index.enabled"; + /** * Should store the settings from opensearch.yml. */ @@ -122,4 +128,10 @@ public static boolean isEnabled(Setting<Boolean> featureFlag) { true, Property.NodeScope ); + + public static final Setting<Boolean> WRITEABLE_REMOTE_INDEX_SETTING = Setting.boolSetting( + WRITEABLE_REMOTE_INDEX, + false, + Property.NodeScope + ); } From cbfe160f32c845fc52a04405797ef90dcdc3e88a Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Fri, 5 Jan 2024 15:24:46 -0500 Subject: [PATCH 057/178] Bump netty from 4.1.100.Final to 4.1.104.Final (#11775) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- CHANGELOG.md | 2 +- buildSrc/version.properties | 2 +- .../licenses/netty-buffer-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-common-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-common-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-handler-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.101.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-dns-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-dns-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-socks-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-socks-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.101.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 + .../repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 | 1 - .../repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 + 
.../licenses/netty-resolver-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 + .../netty-transport-classes-epoll-4.1.101.Final.jar.sha1 | 1 - .../netty-transport-classes-epoll-4.1.104.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.101.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-buffer-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-dns-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-dns-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-common-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-common-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-handler-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.101.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.101.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 + 90 files changed, 46 insertions(+), 46 deletions(-) delete mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 create mode 
100644 modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-4.1.101.Final.jar.sha1 create mode 100644 
plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 delete mode 100644 
plugins/transport-reactor-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index f7aa083eb2bcb..140c4b07ab6e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -139,7 +139,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171)) - Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271)) - Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273)) -- Bump `netty` from 4.1.100.Final to 4.1.101.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294)) +- Bump `netty` from 4.1.100.Final to 4.1.104.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775)) - Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692)) - Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861)) - Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344)) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 74d655cfb1045..58b54e7b77a1e 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -26,7 +26,7 @@ jakarta_annotation = 1.3.5 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 5.13.0 -netty = 4.1.101.Final +netty = 4.1.104.Final joda = 2.12.2 # project reactor diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 
100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..478e7cfba1470 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 
index f4425d5ac8e5f..0000000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index fd9199092452b..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 0789dea62778a..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24d0f4f2b0b0b55acc498d62f9c0921d0ea6da7b \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..0232fc58f9357 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git 
a/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.101.Final.jar.sha1 deleted file mode 100644 index fb2d1aa25a5ec..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cbaac9e2c2d7b86e4c1bda220cde49949c9bcaee \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..3b6cd3524d978 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5b126ceba61275f38297cacd5ea0cd6d3addee04 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 deleted file mode 100644 index ca030677c32d0..0000000000000 --- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c198ee61abba7ae6b3cb6b1033f04fb0ae79d512 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9d01e814971f2 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +50a2d899a8f8a68daed1a9b6d7750184310cc45f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 450c34d975bb9..0000000000000 --- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3f503d7554cf25edc6b83b9b4724b240c6e91d5 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..987b524aedc98 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f1210e5856fecb9182d58c0d33fa6e946b344b40 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ 
b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 deleted file mode 100644 index 093ced04bee4b..0000000000000 --- a/plugins/repository-hdfs/licenses/netty-all-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2761d380bc97e6ce64745dd9ca04538cedd40a5b \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9110503f67304 --- /dev/null +++ b/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d75246285e5fac6f6dad47e387ed4f46f36e521d \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/plugins/repository-s3/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..478e7cfba1470 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/plugins/repository-s3/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/plugins/repository-s3/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4425d5ac8e5f..0000000000000 --- a/plugins/repository-s3/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index fd9199092452b..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git 
a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.101.Final.jar.sha1 deleted file mode 100644 index a710d964c1803..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f86303830ab65680e45cabb531e37078ecc6791e \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..522d85a3bf12e --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +700fdbabab44709b0eccffe8f91c4226a5787356 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 
+0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..478e7cfba1470 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4425d5ac8e5f..0000000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.101.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index fd9199092452b..0000000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git 
a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 deleted file mode 100644 index 7bb27bbfcb87d..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4d94fd6cdf7a37b15237e32434afd6b955cc591 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..30f215e47f8ad --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 deleted file mode 100644 index c792b4e834030..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -642523c57c4be15b8b461be7b1532262d193bbb3 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..9ed9b896d4b4e --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 0789dea62778a..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -24d0f4f2b0b0b55acc498d62f9c0921d0ea6da7b \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..0232fc58f9357 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 deleted file mode 100644 index 8bf31f6c3a2c7..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c648f863b301e3fc62d0de098ec61be7628b8ea2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..478e7cfba1470 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 deleted file mode 100644 index 48b42bfab9287..0000000000000 
--- a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -40590da6c615b852384542051330e9fd51d4c4b1 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..f0242709f34f7 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4fe86799eaff..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bdb16e4d308b5757123decc886896815d1daf7a3 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..1b533eea3b3b3 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 deleted file mode 100644 index cfc0befee237a..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7800aea949bf95f63df73166d144e49405b279dd \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..70777be4dc636 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 deleted file mode 100644 index f4425d5ac8e5f..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e7eddfa4dfffcbd375d265d1fd08297df94f82f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..d7c15af9312fe --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 deleted file mode 100644 index 450c34d975bb9..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3f503d7554cf25edc6b83b9b4724b240c6e91d5 \ No newline at end of file diff --git 
a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..987b524aedc98 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +f1210e5856fecb9182d58c0d33fa6e946b344b40 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 deleted file mode 100644 index fd9199092452b..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0b36cc2337b4a4135df7ee2f2d77431c3b541dc5 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..5cacaf11a29ce --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 deleted file mode 100644 index 291c87ad7987b..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.101.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d0697ef1d71c6111a1b18a387fa985fd6398ace2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 new file mode 100644 index 0000000000000..64797bf11aedc --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 @@ -0,0 +1 @@ +d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file From 5f3d782ded6f38c65ec4c00f571e12a77779cf47 Mon Sep 17 00:00:00 2001 From: Harsha Vamsi Kalluri <harshavamsi096@gmail.com> Date: Fri, 5 Jan 2024 13:54:46 -0800 Subject: [PATCH 058/178] Update skip version to match branches (#11776) Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com> --- .../rest-api-spec/test/search/340_doc_values_field.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml index f3281e35ac8e6..c7b00d5fbbef2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml @@ -1,8 +1,8 @@ setup: - skip: features: [ "headers" ] - version: " - 2.99.99" - reason: "searching with only doc_values was added in 3.0.0" + version: " - 2.11.99" + reason: "searching with only doc_values was added in 2.12.0" --- "search on fields with both index and doc_values enabled": - do: From 89e47274d5d60d770d3b57161ca77509605d6898 Mon Sep 17 00:00:00 2001 From: Rishab Nahata <rnnahata@amazon.com> Date: Mon, 8 Jan 2024 12:24:31 +0530 Subject: [PATCH 059/178] Skip test 
testThreeZoneOneReplicaWithForceZoneValueAndLoadAwareness (#11767) Signed-off-by: Rishab Nahata <rnnahata@amazon.com> --- .../org/opensearch/cluster/allocation/AwarenessAllocationIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java index c69718d982f8b..522d63b22a0da 100644 --- a/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/cluster/allocation/AwarenessAllocationIT.java @@ -364,6 +364,7 @@ public void testAwarenessZonesIncrementalNodes() { assertThat(counts.get(noZoneNode), equalTo(2)); } + @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/5908") public void testThreeZoneOneReplicaWithForceZoneValueAndLoadAwareness() throws Exception { int nodeCountPerAZ = 5; int numOfShards = 30; From c7196da0d3a16abd319f5b066cc080dd8c306f99 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 09:47:41 -0500 Subject: [PATCH 060/178] Bump commons-io:commons-io from 2.14.0 to 2.15.1 in /plugins/repository-hdfs (#11796) * Bump commons-io:commons-io in /plugins/repository-hdfs Bumps commons-io:commons-io from 2.14.0 to 2.15.1. --- updated-dependencies: - dependency-name: commons-io:commons-io dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 | 1 - plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 140c4b07ab6e0..3f273f10596e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -125,7 +125,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) -- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560)) +- Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), 
[#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796)) - Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298)) - Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630)) - Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506)) diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index f04d42a2155d6..36843e3bc8700 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -75,7 +75,7 @@ dependencies { api 'commons-collections:commons-collections:3.2.2' api "org.apache.commons:commons-compress:${versions.commonscompress}" api 'org.apache.commons:commons-configuration2:2.9.0' - api 'commons-io:commons-io:2.14.0' + api 'commons-io:commons-io:2.15.1' api 'org.apache.commons:commons-lang3:3.14.0' implementation 'com.google.re2j:re2j:1.7' api 'javax.servlet:servlet-api:2.5' diff --git a/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 deleted file mode 100644 index 33c5cfe53e01d..0000000000000 --- a/plugins/repository-hdfs/licenses/commons-io-2.14.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a4c6e1f6c196339473cd2e1b037f0eb97c62755b \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 b/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 new file mode 100644 index 0000000000000..47c5d13812a36 --- /dev/null +++ b/plugins/repository-hdfs/licenses/commons-io-2.15.1.jar.sha1 @@ -0,0 +1 @@ +f11560da189ab563a5c8e351941415430e9304ea \ No newline at end of file From fe98aad152b85e6e90265b99ccf00e55af296088 Mon Sep 17 00:00:00 2001 From: Suraj Singh <surajrider@gmail.com> Date: Mon, 8 Jan 2024 11:45:07 -0800 Subject: [PATCH 061/178] Use SegmentReplicationTarget test to validate non-active on-disk files are reused for replication (#11786) * Use SegmentReplicationTarget test to validate non-active on-disk files are reused for replication Signed-off-by: Suraj Singh <surajrider@gmail.com> * Fix original shard level unit test Signed-off-by: Suraj Singh <surajrider@gmail.com> --------- Signed-off-by: Suraj Singh <surajrider@gmail.com> --- .../index/shard/RemoteIndexShardTests.java | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java index dd92bfb47afdb..eacc504428ef1 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.util.Version; +import org.opensearch.action.StepListener; import 
org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.concurrent.GatedCloseable; import org.opensearch.common.settings.Settings; @@ -371,7 +372,6 @@ public void testPrimaryRestart() throws Exception { * prevent FileAlreadyExistsException. It does so by only copying files in first round of segment replication without * committing locally so that in next round of segment replication those files are not considered for download again */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/10885") public void testSegRepSucceedsOnPreviousCopiedFiles() throws Exception { try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { shards.startAll(); @@ -388,6 +388,7 @@ public void testSegRepSucceedsOnPreviousCopiedFiles() throws Exception { ); CountDownLatch latch = new CountDownLatch(1); + logger.info("--> Starting first round of replication"); // Start first round of segment replication. This should fail with simulated error but with replica having // files in its local store but not in active reader. final SegmentReplicationTarget target = targetService.startReplication( @@ -427,6 +428,7 @@ public void onReplicationFailure( // Start next round of segment replication and not throwing exception resulting in commit on replica when(sourceFactory.get(any())).thenReturn(getRemoteStoreReplicationSource(replica, () -> {})); CountDownLatch waitForSecondRound = new CountDownLatch(1); + logger.info("--> Starting second round of replication"); final SegmentReplicationTarget newTarget = targetService.startReplication( replica, primary.getLatestReplicationCheckpoint(), @@ -560,8 +562,19 @@ public void getSegmentFiles( BiConsumer<String, Long> fileProgressTracker, ActionListener<GetSegmentFilesResponse> listener ) { - super.getSegmentFiles(replicationId, checkpoint, filesToFetch, indexShard, (fileName, bytesRecovered) -> {}, listener); - postGetFilesRunnable.run(); + StepListener<GetSegmentFilesResponse> waitForCopyFilesListener = new StepListener(); + super.getSegmentFiles( + replicationId, + checkpoint, + filesToFetch, + indexShard, + (fileName, bytesRecovered) -> {}, + waitForCopyFilesListener + ); + waitForCopyFilesListener.whenComplete(response -> { + postGetFilesRunnable.run(); + listener.onResponse(response); + }, listener::onFailure); } @Override From 97ccb5cbe47b3caab86249574def19b5448846bc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 12:34:04 -0800 Subject: [PATCH 062/178] Bump net.java.dev.jna:jna from 5.13.0 to 5.14.0 (#11798) * Bump net.java.dev.jna:jna from 5.13.0 to 5.14.0 Bumps [net.java.dev.jna:jna](https://github.com/java-native-access/jna) from 5.13.0 to 5.14.0. - [Changelog](https://github.com/java-native-access/jna/blob/master/CHANGES.md) - [Commits](https://github.com/java-native-access/jna/compare/5.13.0...5.14.0) --- updated-dependencies: - dependency-name: net.java.dev.jna:jna dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + buildSrc/build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3f273f10596e6..8fbdbf6000f87 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -159,6 +159,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.maxmind.geoip2:geoip2` from 4.1.0 to 4.2.0 ([#11559](https://github.com/opensearch-project/OpenSearch/pull/11559)) - Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) - Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) +- Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index a42976fef572c..3c846b48549fb 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -110,7 +110,7 @@ dependencies { api 'com.netflix.nebula:gradle-info-plugin:12.1.6' api 'org.apache.rat:apache-rat:0.15' api 'commons-io:commons-io:2.15.1' - api "net.java.dev.jna:jna:5.13.0" + api "net.java.dev.jna:jna:5.14.0" api 'com.github.johnrengelman:shadow:8.1.1' api 'org.jdom:jdom2:2.0.6.1' api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}" From 0b30f306e2a67cd87b4c20c8bfc06853b9e9f772 Mon Sep 17 00:00:00 2001 From: Marc Handalian <handalm@amazon.com> Date: Mon, 8 Jan 2024 12:51:24 -0800 Subject: [PATCH 063/178] Update dropdowns in issue templates to have no option selected by default (#11805) This change updates the bug and feature templates to set the first option as empty. This prevents form submission until an option is selected. Signed-off-by: Marc Handalian <marc.handalian@gmail.com> --- .github/ISSUE_TEMPLATE/bug_template.yml | 3 ++- .github/ISSUE_TEMPLATE/feature_request.yml | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_template.yml b/.github/ISSUE_TEMPLATE/bug_template.yml index 2cd1ee8a7e688..5f0798abe0f68 100644 --- a/.github/ISSUE_TEMPLATE/bug_template.yml +++ b/.github/ISSUE_TEMPLATE/bug_template.yml @@ -15,7 +15,7 @@ body: description: Choose a specific OpenSearch component your bug belongs to. If you are unsure which to select or if the component is not present, select "Other". multiple: false options: - - Other + - # Empty first option to force selection - Build - Clients - Cluster Manager @@ -24,6 +24,7 @@ body: - Indexing:Replication - Indexing - Libraries + - Other - Plugins - Search:Aggregations - Search:Performance diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml index d93ac8b590706..0159e771f7f80 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.yml +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -22,7 +22,7 @@ body: description: Choose a specific OpenSearch component your feature request belongs to. 
If you are unsure of which component to select or if the component is not present, select "Other". multiple: false options: - - Other + - # Empty first option to force selection - Build - Clients - Cluster Manager @@ -31,6 +31,7 @@ body: - Indexing:Replication - Indexing - Libraries + - Other - Plugins - Search:Aggregations - Search:Performance From e948c4037f61117d1a16aa058ea7b5b2ea3b6817 Mon Sep 17 00:00:00 2001 From: Siddhant Deshmukh <deshsid@amazon.com> Date: Mon, 8 Jan 2024 13:35:41 -0800 Subject: [PATCH 064/178] Capture query categorization metrics for additional query types and aggregations types (#11582) Signed-off-by: Siddhant Deshmukh <deshsid@amazon.com> --- CHANGELOG.md | 1 + .../SearchQueryAggregationCategorizer.java | 55 ++++++ .../action/search/SearchQueryCategorizer.java | 10 +- .../SearchQueryCategorizingVisitor.java | 38 +--- .../action/search/SearchQueryCounters.java | 185 +++++++++++++++++- .../query/FieldMaskingSpanQueryBuilder.java | 4 +- .../search/SearchQueryCategorizerTests.java | 60 +++--- 7 files changed, 286 insertions(+), 67 deletions(-) create mode 100644 server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 8fbdbf6000f87..19bcd75347444 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -185,6 +185,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512)) - Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) - Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) +- Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582)) ### Deprecated diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java b/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java new file mode 100644 index 0000000000000..607ccf182851b --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryAggregationCategorizer.java @@ -0,0 +1,55 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.PipelineAggregationBuilder; +import org.opensearch.telemetry.metrics.tags.Tags; + +import java.util.Collection; + +/** + * Increments the counters related to Aggregation Search Queries. 
+ */ +public class SearchQueryAggregationCategorizer { + + private static final String TYPE_TAG = "type"; + private final SearchQueryCounters searchQueryCounters; + + public SearchQueryAggregationCategorizer(SearchQueryCounters searchQueryCounters) { + this.searchQueryCounters = searchQueryCounters; + } + + public void incrementSearchQueryAggregationCounters(Collection<AggregationBuilder> aggregatorFactories) { + for (AggregationBuilder aggregationBuilder : aggregatorFactories) { + incrementCountersRecursively(aggregationBuilder); + } + } + + private void incrementCountersRecursively(AggregationBuilder aggregationBuilder) { + // Increment counters for the current aggregation + String aggregationType = aggregationBuilder.getType(); + searchQueryCounters.aggCounter.add(1, Tags.create().addTag(TYPE_TAG, aggregationType)); + + // Recursively process sub-aggregations if any + Collection<AggregationBuilder> subAggregations = aggregationBuilder.getSubAggregations(); + if (subAggregations != null && !subAggregations.isEmpty()) { + for (AggregationBuilder subAggregation : subAggregations) { + incrementCountersRecursively(subAggregation); + } + } + + // Process pipeline aggregations + Collection<PipelineAggregationBuilder> pipelineAggregations = aggregationBuilder.getPipelineAggregations(); + for (PipelineAggregationBuilder pipelineAggregation : pipelineAggregations) { + String pipelineAggregationType = pipelineAggregation.getType(); + searchQueryCounters.aggCounter.add(1, Tags.create().addTag(TYPE_TAG, pipelineAggregationType)); + } + } +} diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java index 8fe1be610f9af..ffaae5b08772f 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizer.java @@ -32,13 +32,15 @@ final class SearchQueryCategorizer { final SearchQueryCounters searchQueryCounters; + final SearchQueryAggregationCategorizer searchQueryAggregationCategorizer; + public SearchQueryCategorizer(MetricsRegistry metricsRegistry) { searchQueryCounters = new SearchQueryCounters(metricsRegistry); + searchQueryAggregationCategorizer = new SearchQueryAggregationCategorizer(searchQueryCounters); } public void categorize(SearchSourceBuilder source) { QueryBuilder topLevelQueryBuilder = source.query(); - logQueryShape(topLevelQueryBuilder); incrementQueryTypeCounters(topLevelQueryBuilder); incrementQueryAggregationCounters(source.aggregations()); @@ -56,9 +58,11 @@ private void incrementQuerySortCounters(List<SortBuilder<?>> sorts) { } private void incrementQueryAggregationCounters(AggregatorFactories.Builder aggregations) { - if (aggregations != null) { - searchQueryCounters.aggCounter.add(1); + if (aggregations == null) { + return; } + + searchQueryAggregationCategorizer.incrementSearchQueryAggregationCounters(aggregations.getAggregatorFactories()); } private void incrementQueryTypeCounters(QueryBuilder topLevelQueryBuilder) { diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java index 98f0169e69a5c..31f83dbef9dc9 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCategorizingVisitor.java @@ -9,26 +9,14 @@ package 
org.opensearch.action.search; import org.apache.lucene.search.BooleanClause; -import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.MatchPhraseQueryBuilder; -import org.opensearch.index.query.MatchQueryBuilder; -import org.opensearch.index.query.MultiMatchQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilderVisitor; -import org.opensearch.index.query.QueryStringQueryBuilder; -import org.opensearch.index.query.RangeQueryBuilder; -import org.opensearch.index.query.RegexpQueryBuilder; -import org.opensearch.index.query.TermQueryBuilder; -import org.opensearch.index.query.WildcardQueryBuilder; -import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; -import org.opensearch.telemetry.metrics.tags.Tags; /** - * Class to visit the querybuilder tree and also track the level information. + * Class to visit the query builder tree and also track the level information. * Increments the counters related to Search Query type. */ final class SearchQueryCategorizingVisitor implements QueryBuilderVisitor { - private static final String LEVEL_TAG = "level"; private final int level; private final SearchQueryCounters searchQueryCounters; @@ -42,29 +30,7 @@ private SearchQueryCategorizingVisitor(SearchQueryCounters counters, int level) } public void accept(QueryBuilder qb) { - if (qb instanceof BoolQueryBuilder) { - searchQueryCounters.boolCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof FunctionScoreQueryBuilder) { - searchQueryCounters.functionScoreCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof MatchQueryBuilder) { - searchQueryCounters.matchCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof MatchPhraseQueryBuilder) { - searchQueryCounters.matchPhrasePrefixCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof MultiMatchQueryBuilder) { - searchQueryCounters.multiMatchCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof QueryStringQueryBuilder) { - searchQueryCounters.queryStringQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof RangeQueryBuilder) { - searchQueryCounters.rangeCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof RegexpQueryBuilder) { - searchQueryCounters.regexCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof TermQueryBuilder) { - searchQueryCounters.termCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else if (qb instanceof WildcardQueryBuilder) { - searchQueryCounters.wildcardCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } else { - searchQueryCounters.otherQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); - } + searchQueryCounters.incrementCounter(qb, level); } public QueryBuilderVisitor getChildVisitor(BooleanClause.Occur occur) { diff --git a/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java b/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java index 7e0259af07701..bbb883809b41b 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java +++ b/server/src/main/java/org/opensearch/action/search/SearchQueryCounters.java @@ -8,33 +8,82 @@ package org.opensearch.action.search; +import org.opensearch.index.query.BoolQueryBuilder; +import org.opensearch.index.query.BoostingQueryBuilder; +import 
org.opensearch.index.query.ConstantScoreQueryBuilder; +import org.opensearch.index.query.DisMaxQueryBuilder; +import org.opensearch.index.query.DistanceFeatureQueryBuilder; +import org.opensearch.index.query.ExistsQueryBuilder; +import org.opensearch.index.query.FieldMaskingSpanQueryBuilder; +import org.opensearch.index.query.FuzzyQueryBuilder; +import org.opensearch.index.query.GeoBoundingBoxQueryBuilder; +import org.opensearch.index.query.GeoDistanceQueryBuilder; +import org.opensearch.index.query.GeoPolygonQueryBuilder; +import org.opensearch.index.query.GeoShapeQueryBuilder; +import org.opensearch.index.query.IntervalQueryBuilder; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.index.query.MatchPhraseQueryBuilder; +import org.opensearch.index.query.MatchQueryBuilder; +import org.opensearch.index.query.MultiMatchQueryBuilder; +import org.opensearch.index.query.PrefixQueryBuilder; +import org.opensearch.index.query.QueryBuilder; +import org.opensearch.index.query.QueryStringQueryBuilder; +import org.opensearch.index.query.RangeQueryBuilder; +import org.opensearch.index.query.RegexpQueryBuilder; +import org.opensearch.index.query.ScriptQueryBuilder; +import org.opensearch.index.query.SimpleQueryStringBuilder; +import org.opensearch.index.query.TermQueryBuilder; +import org.opensearch.index.query.WildcardQueryBuilder; +import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.opensearch.telemetry.metrics.Counter; import org.opensearch.telemetry.metrics.MetricsRegistry; +import org.opensearch.telemetry.metrics.tags.Tags; + +import java.util.HashMap; +import java.util.Map; /** * Class contains all the Counters related to search query types. */ final class SearchQueryCounters { + private static final String LEVEL_TAG = "level"; private static final String UNIT = "1"; private final MetricsRegistry metricsRegistry; // Counters related to Query types public final Counter aggCounter; public final Counter boolCounter; + public final Counter boostingCounter; + public final Counter constantScoreCounter; + public final Counter disMaxCounter; + public final Counter distanceFeatureCounter; + public final Counter existsCounter; + public final Counter fieldMaskingSpanCounter; public final Counter functionScoreCounter; + public final Counter fuzzyCounter; + public final Counter geoBoundingBoxCounter; + public final Counter geoDistanceCounter; + public final Counter geoPolygonCounter; + public final Counter geoShapeCounter; + public final Counter intervalCounter; public final Counter matchCounter; + public final Counter matchallCounter; public final Counter matchPhrasePrefixCounter; public final Counter multiMatchCounter; public final Counter otherQueryCounter; - public final Counter queryStringQueryCounter; + public final Counter prefixCounter; + public final Counter queryStringCounter; public final Counter rangeCounter; - public final Counter regexCounter; - + public final Counter regexpCounter; + public final Counter scriptCounter; + public final Counter simpleQueryStringCounter; public final Counter sortCounter; public final Counter skippedCounter; public final Counter termCounter; public final Counter totalCounter; public final Counter wildcardCounter; + public final Counter numberOfInputFieldsCounter; + private final Map<Class<? 
extends QueryBuilder>, Counter> queryHandlers; public SearchQueryCounters(MetricsRegistry metricsRegistry) { this.metricsRegistry = metricsRegistry; @@ -48,16 +97,81 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) { "Counter for the number of top level and nested bool search queries", UNIT ); + this.boostingCounter = metricsRegistry.createCounter( + "search.query.type.boost.count", + "Counter for the number of top level and nested boost search queries", + UNIT + ); + this.constantScoreCounter = metricsRegistry.createCounter( + "search.query.type.counstantscore.count", + "Counter for the number of top level and nested constant score search queries", + UNIT + ); + this.disMaxCounter = metricsRegistry.createCounter( + "search.query.type.dismax.count", + "Counter for the number of top level and nested disjuntion max search queries", + UNIT + ); + this.distanceFeatureCounter = metricsRegistry.createCounter( + "search.query.type.distancefeature.count", + "Counter for the number of top level and nested distance feature search queries", + UNIT + ); + this.existsCounter = metricsRegistry.createCounter( + "search.query.type.exists.count", + "Counter for the number of top level and nested exists search queries", + UNIT + ); + this.fieldMaskingSpanCounter = metricsRegistry.createCounter( + "search.query.type.fieldmaskingspan.count", + "Counter for the number of top level and nested field masking span search queries", + UNIT + ); this.functionScoreCounter = metricsRegistry.createCounter( "search.query.type.functionscore.count", "Counter for the number of top level and nested function score search queries", UNIT ); + this.fuzzyCounter = metricsRegistry.createCounter( + "search.query.type.fuzzy.count", + "Counter for the number of top level and nested fuzzy search queries", + UNIT + ); + this.geoBoundingBoxCounter = metricsRegistry.createCounter( + "search.query.type.geoboundingbox.count", + "Counter for the number of top level and nested geo bounding box queries", + UNIT + ); + this.geoDistanceCounter = metricsRegistry.createCounter( + "search.query.type.geodistance.count", + "Counter for the number of top level and nested geo distance queries", + UNIT + ); + this.geoPolygonCounter = metricsRegistry.createCounter( + "search.query.type.geopolygon.count", + "Counter for the number of top level and nested geo polygon queries", + UNIT + ); + this.geoShapeCounter = metricsRegistry.createCounter( + "search.query.type.geoshape.count", + "Counter for the number of top level and nested geo shape queries", + UNIT + ); + this.intervalCounter = metricsRegistry.createCounter( + "search.query.type.interval.count", + "Counter for the number of top level and nested interval queries", + UNIT + ); this.matchCounter = metricsRegistry.createCounter( "search.query.type.match.count", "Counter for the number of top level and nested match search queries", UNIT ); + this.matchallCounter = metricsRegistry.createCounter( + "search.query.type.matchall.count", + "Counter for the number of top level and nested match all search queries", + UNIT + ); this.matchPhrasePrefixCounter = metricsRegistry.createCounter( "search.query.type.matchphrase.count", "Counter for the number of top level and nested match phrase prefix search queries", @@ -73,7 +187,12 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) { "Counter for the number of top level and nested search queries that do not match any other categories", UNIT ); - this.queryStringQueryCounter = metricsRegistry.createCounter( + this.prefixCounter = 
metricsRegistry.createCounter( + "search.query.type.prefix.count", + "Counter for the number of top level and nested search queries that match prefix queries", + UNIT + ); + this.queryStringCounter = metricsRegistry.createCounter( "search.query.type.querystringquery.count", "Counter for the number of top level and nested queryStringQuery search queries", UNIT @@ -83,11 +202,21 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) { "Counter for the number of top level and nested range search queries", UNIT ); - this.regexCounter = metricsRegistry.createCounter( + this.regexpCounter = metricsRegistry.createCounter( "search.query.type.regex.count", "Counter for the number of top level and nested regex search queries", UNIT ); + this.scriptCounter = metricsRegistry.createCounter( + "search.query.type.script.count", + "Counter for the number of top level and nested script search queries", + UNIT + ); + this.simpleQueryStringCounter = metricsRegistry.createCounter( + "search.query.type.simplequerystring.count", + "Counter for the number of top level and nested script simple query string search queries", + UNIT + ); this.skippedCounter = metricsRegistry.createCounter( "search.query.type.skipped.count", "Counter for the number queries skipped due to error", @@ -113,5 +242,51 @@ public SearchQueryCounters(MetricsRegistry metricsRegistry) { "Counter for the number of top level and nested wildcard search queries", UNIT ); + this.numberOfInputFieldsCounter = metricsRegistry.createCounter( + "search.query.type.numberofinputfields.count", + "Counter for the number of input fields in the search queries", + UNIT + ); + this.queryHandlers = new HashMap<>(); + initializeQueryHandlers(); + } + + public void incrementCounter(QueryBuilder queryBuilder, int level) { + Counter counter = queryHandlers.get(queryBuilder.getClass()); + if (counter != null) { + counter.add(1, Tags.create().addTag(LEVEL_TAG, level)); + } else { + otherQueryCounter.add(1, Tags.create().addTag(LEVEL_TAG, level)); + } + } + + private void initializeQueryHandlers() { + + queryHandlers.put(BoolQueryBuilder.class, boolCounter); + queryHandlers.put(FunctionScoreQueryBuilder.class, functionScoreCounter); + queryHandlers.put(MatchQueryBuilder.class, matchCounter); + queryHandlers.put(MatchPhraseQueryBuilder.class, matchPhrasePrefixCounter); + queryHandlers.put(MultiMatchQueryBuilder.class, multiMatchCounter); + queryHandlers.put(QueryStringQueryBuilder.class, queryStringCounter); + queryHandlers.put(RangeQueryBuilder.class, rangeCounter); + queryHandlers.put(RegexpQueryBuilder.class, regexpCounter); + queryHandlers.put(TermQueryBuilder.class, termCounter); + queryHandlers.put(WildcardQueryBuilder.class, wildcardCounter); + queryHandlers.put(BoostingQueryBuilder.class, boostingCounter); + queryHandlers.put(ConstantScoreQueryBuilder.class, constantScoreCounter); + queryHandlers.put(DisMaxQueryBuilder.class, disMaxCounter); + queryHandlers.put(DistanceFeatureQueryBuilder.class, distanceFeatureCounter); + queryHandlers.put(ExistsQueryBuilder.class, existsCounter); + queryHandlers.put(FieldMaskingSpanQueryBuilder.class, fieldMaskingSpanCounter); + queryHandlers.put(FuzzyQueryBuilder.class, fuzzyCounter); + queryHandlers.put(GeoBoundingBoxQueryBuilder.class, geoBoundingBoxCounter); + queryHandlers.put(GeoDistanceQueryBuilder.class, geoDistanceCounter); + queryHandlers.put(GeoPolygonQueryBuilder.class, geoPolygonCounter); + queryHandlers.put(GeoShapeQueryBuilder.class, geoShapeCounter); + queryHandlers.put(IntervalQueryBuilder.class, 
intervalCounter); + queryHandlers.put(MatchAllQueryBuilder.class, matchallCounter); + queryHandlers.put(PrefixQueryBuilder.class, prefixCounter); + queryHandlers.put(ScriptQueryBuilder.class, scriptCounter); + queryHandlers.put(SimpleQueryStringBuilder.class, simpleQueryStringCounter); } } diff --git a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java index 1162689a54689..4e73d87b07b7a 100644 --- a/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/FieldMaskingSpanQueryBuilder.java @@ -54,7 +54,9 @@ * @opensearch.internal */ public class FieldMaskingSpanQueryBuilder extends AbstractQueryBuilder<FieldMaskingSpanQueryBuilder> implements SpanQueryBuilder { - public static final ParseField SPAN_FIELD_MASKING_FIELD = new ParseField("span_field_masking", "field_masking_span"); + + public static final String NAME = "span_field_masking"; + public static final ParseField SPAN_FIELD_MASKING_FIELD = new ParseField(NAME, "field_masking_span"); private static final ParseField FIELD_FIELD = new ParseField("field"); private static final ParseField QUERY_FIELD = new ParseField("query"); diff --git a/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java b/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java index a2e301143d694..17fa124890158 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchQueryCategorizerTests.java @@ -34,16 +34,19 @@ import java.util.Arrays; -import org.mockito.Mockito; +import org.mockito.ArgumentCaptor; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public final class SearchQueryCategorizerTests extends OpenSearchTestCase { + private static final String MULTI_TERMS_AGGREGATION = "multi_terms"; + private MetricsRegistry metricsRegistry; private SearchQueryCategorizer searchQueryCategorizer; @@ -71,7 +74,20 @@ public void testAggregationsQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d)); + verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d), any(Tags.class)); + + // capture the arguments passed to the aggCounter.add method + ArgumentCaptor<Double> valueCaptor = ArgumentCaptor.forClass(Double.class); + ArgumentCaptor<Tags> tagsCaptor = ArgumentCaptor.forClass(Tags.class); + + // Verify that aggCounter.add was called with the expected arguments + verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(valueCaptor.capture(), tagsCaptor.capture()); + + double actualValue = valueCaptor.getValue(); + String actualTag = (String) tagsCaptor.getValue().getTagsMap().get("type"); + + assertEquals(1.0d, actualValue, 0.0001); + assertEquals(MULTI_TERMS_AGGREGATION, actualTag); } public void testBoolQuery() { @@ -81,8 +97,8 @@ public void testBoolQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), 
any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); } public void testFunctionScoreQuery() { @@ -92,7 +108,7 @@ public void testFunctionScoreQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.functionScoreCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.functionScoreCounter).add(eq(1.0d), any(Tags.class)); } public void testMatchQuery() { @@ -102,7 +118,7 @@ public void testMatchQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); } public void testMatchPhraseQuery() { @@ -112,7 +128,7 @@ public void testMatchPhraseQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchPhrasePrefixCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchPhrasePrefixCounter).add(eq(1.0d), any(Tags.class)); } public void testMultiMatchQuery() { @@ -122,7 +138,7 @@ public void testMultiMatchQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.multiMatchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.multiMatchCounter).add(eq(1.0d), any(Tags.class)); } public void testOtherQuery() { @@ -136,8 +152,8 @@ public void testOtherQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.otherQueryCounter, times(2)).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.otherQueryCounter, times(1)).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); } public void testQueryStringQuery() { @@ -148,7 +164,7 @@ public void testQueryStringQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.queryStringQueryCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.queryStringCounter).add(eq(1.0d), any(Tags.class)); } public void testRangeQuery() { @@ -160,7 +176,7 @@ public void testRangeQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.rangeCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.rangeCounter).add(eq(1.0d), any(Tags.class)); } public void testRegexQuery() { @@ -169,7 +185,7 @@ public void testRegexQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.regexCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.regexpCounter).add(eq(1.0d), any(Tags.class)); } public void testSortQuery() { @@ -180,8 +196,8 @@ public void testSortQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); - 
Mockito.verify(searchQueryCategorizer.searchQueryCounters.sortCounter, times(2)).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.sortCounter, times(2)).add(eq(1.0d), any(Tags.class)); } public void testTermQuery() { @@ -191,7 +207,7 @@ public void testTermQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); } public void testWildcardQuery() { @@ -201,7 +217,7 @@ public void testWildcardQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.wildcardCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.wildcardCounter).add(eq(1.0d), any(Tags.class)); } public void testComplexQuery() { @@ -219,10 +235,10 @@ public void testComplexQuery() { searchQueryCategorizer.categorize(sourceBuilder); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.regexCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); - Mockito.verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d)); + verify(searchQueryCategorizer.searchQueryCounters.termCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.matchCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.regexpCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.boolCounter).add(eq(1.0d), any(Tags.class)); + verify(searchQueryCategorizer.searchQueryCounters.aggCounter).add(eq(1.0d), any(Tags.class)); } } From 2de44a7b771c9b8b59f57069d0fdfdf9ee818ec2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Jan 2024 15:28:32 -0800 Subject: [PATCH 065/178] Bump lycheeverse/lychee-action from 1.8.0 to 1.9.0 (#11795) * Bump lycheeverse/lychee-action from 1.8.0 to 1.9.0 Bumps [lycheeverse/lychee-action](https://github.com/lycheeverse/lychee-action) from 1.8.0 to 1.9.0. - [Release notes](https://github.com/lycheeverse/lychee-action/releases) - [Commits](https://github.com/lycheeverse/lychee-action/compare/v1.8.0...v1.9.0) --- updated-dependencies: - dependency-name: lycheeverse/lychee-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- .github/workflows/links.yml | 2 +- CHANGELOG.md | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index ca026f530b4af..2714d45bd108f 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -13,7 +13,7 @@ jobs: - uses: actions/checkout@v4 - name: lychee Link Checker id: lychee - uses: lycheeverse/lychee-action@v1.8.0 + uses: lycheeverse/lychee-action@v1.9.0 with: args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes fail: true diff --git a/CHANGELOG.md b/CHANGELOG.md index 19bcd75347444..c0d39f1724353 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -160,6 +160,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) - Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) - Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) +- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.0 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) From a1e4602b8dcd5676d1e7b410d86242d7a555d619 Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Tue, 9 Jan 2024 22:11:04 +0800 Subject: [PATCH 066/178] Enable must_exist parameter for update aliases API (#11210) * Enable must_exist parameter for update aliases API Signed-off-by: Gao Binlong <gbinlong@amazon.com> * modify changelog Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Fix test failure Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Fix test failure Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Remove silently when all aliases are non-existing and must_exist is false Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Modify some comments to run gradle check again Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Add more yaml test Signed-off-by: Gao Binlong <gbinlong@amazon.com> --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 1 + .../40_remove_with_must_exist.yml | 141 ++++++++++++++++++ .../indices/alias/IndicesAliasesRequest.java | 14 +- .../alias/TransportIndicesAliasesAction.java | 25 +++- .../cluster/metadata/AliasAction.java | 5 +- .../indices/alias/AliasActionsTests.java | 7 + .../MetadataIndexAliasesServiceTests.java | 7 +- .../alias/RandomAliasActionsGenerator.java | 5 + 8 files changed, 196 insertions(+), 9 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index c0d39f1724353..45ab53b0d4246 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -107,6 +107,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Implement on behalf of token passing for extensions 
([#8679](https://github.com/opensearch-project/OpenSearch/pull/8679)) - Provide service accounts tokens to extensions ([#9618](https://github.com/opensearch-project/OpenSearch/pull/9618)) - [Streaming Indexing] Introduce new experimental server HTTP transport based on Netty 4 and Project Reactor (Reactor Netty) ([#9672](https://github.com/opensearch-project/OpenSearch/pull/9672)) +- Enable must_exist parameter for update aliases API ([#11210](https://github.com/opensearch-project/OpenSearch/pull/11210)) - Add back half_float BKD based sort query optimization ([#11024](https://github.com/opensearch-project/OpenSearch/pull/11024)) - Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650)) - Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040)) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml new file mode 100644 index 0000000000000..dbf6a4fad3295 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml @@ -0,0 +1,141 @@ +--- +"Throw aliases missing exception when removing non-existing alias with setting must_exist to true": + - skip: + version: " - 2.99.99" + reason: "introduced in 3.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - is_false: '' + + - do: + catch: /aliases \[test_alias\] missing/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + alias: test_alias + must_exist: true + + - do: + catch: /aliases \[testAlias\*\] missing/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [ testAlias* ] + must_exist: true + + - do: + catch: /\[aliases\] can't be empty/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [] + must_exist: true + +--- +"Throw aliases missing exception when all of the specified aliases are non-existing": + - skip: + version: " - 2.99.99" + reason: "introduced in 3.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - is_false: '' + + - do: + catch: /aliases \[test\_alias\] missing/ + indices.update_aliases: + body: + actions: + - remove: + index: test_index + alias: test_alias + + - do: + catch: /aliases \[test\_alias\*\] missing/ + indices.update_aliases: + body: + actions: + - remove: + indices: [ test_index ] + aliases: [ test_alias* ] + +--- +"Remove successfully when some specified aliases are non-existing": + - skip: + version: " - 2.99.99" + reason: "introduced in 3.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - is_false: '' + + - do: + indices.update_aliases: + body: + actions: + - add: + indices: [ test_index ] + aliases: [ test_alias ] + + - do: + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [test_alias, test_alias1, test_alias2] + must_exist: false + + - match: { acknowledged: true } + +--- +"Remove silently when all of the specified aliases are non-existing and must_exist is false": + - skip: + version: " - 2.99.99" + reason: "introduced in 3.0" + + - do: + indices.create: + index: test_index + + - do: + indices.exists_alias: + name: test_alias + + - 
is_false: '' + + - do: + indices.update_aliases: + body: + actions: + - remove: + index: test_index + aliases: [test_alias, test_alias1, test_alias2] + must_exist: false + + - match: { acknowledged: true } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java index bc27c70282368..6ce62dda32d0a 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -220,9 +220,11 @@ private static ObjectParser<AliasActions, Void> parser(String name, Supplier<Ali ADD_PARSER.declareField(AliasActions::searchRouting, XContentParser::text, SEARCH_ROUTING, ValueType.INT); ADD_PARSER.declareField(AliasActions::writeIndex, XContentParser::booleanValue, IS_WRITE_INDEX, ValueType.BOOLEAN); ADD_PARSER.declareField(AliasActions::isHidden, XContentParser::booleanValue, IS_HIDDEN, ValueType.BOOLEAN); - ADD_PARSER.declareField(AliasActions::mustExist, XContentParser::booleanValue, MUST_EXIST, ValueType.BOOLEAN); } private static final ObjectParser<AliasActions, Void> REMOVE_PARSER = parser(REMOVE.getPreferredName(), AliasActions::remove); + static { + REMOVE_PARSER.declareField(AliasActions::mustExist, XContentParser::booleanValue, MUST_EXIST, ValueType.BOOLEAN); + } private static final ObjectParser<AliasActions, Void> REMOVE_INDEX_PARSER = parser( REMOVE_INDEX.getPreferredName(), AliasActions::removeIndex @@ -554,6 +556,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (null != isHidden) { builder.field(IS_HIDDEN.getPreferredName(), isHidden); } + if (null != mustExist) { + builder.field(MUST_EXIST.getPreferredName(), mustExist); + } builder.endObject(); builder.endObject(); return builder; @@ -582,6 +587,8 @@ public String toString() { + searchRouting + ",writeIndex=" + writeIndex + + ",mustExist=" + + mustExist + "]"; } @@ -600,12 +607,13 @@ public boolean equals(Object obj) { && Objects.equals(indexRouting, other.indexRouting) && Objects.equals(searchRouting, other.searchRouting) && Objects.equals(writeIndex, other.writeIndex) - && Objects.equals(isHidden, other.isHidden); + && Objects.equals(isHidden, other.isHidden) + && Objects.equals(mustExist, other.mustExist); } @Override public int hashCode() { - return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting, writeIndex, isHidden); + return Objects.hash(type, indices, aliases, filter, routing, indexRouting, searchRouting, writeIndex, isHidden, mustExist); } } diff --git a/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java b/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java index 769044f3dafb7..81cb3102cfcb9 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/alias/TransportIndicesAliasesAction.java @@ -50,6 +50,7 @@ import org.opensearch.cluster.metadata.MetadataIndexAliasesService; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; +import org.opensearch.common.regex.Regex; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.index.Index; @@ -59,6 +60,7 @@ import 
java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -220,12 +222,33 @@ private static String[] concreteAliases(IndicesAliasesRequest.AliasActions actio // for DELETE we expand the aliases String[] indexAsArray = { concreteIndex }; final Map<String, List<AliasMetadata>> aliasMetadata = metadata.findAliases(action, indexAsArray); - List<String> finalAliases = new ArrayList<>(); + Set<String> finalAliases = new HashSet<>(); for (final List<AliasMetadata> curAliases : aliasMetadata.values()) { for (AliasMetadata aliasMeta : curAliases) { finalAliases.add(aliasMeta.alias()); } } + + // must_exist can only be set in the Remove Action in Update aliases API, + // we check the value here to make the behavior consistent with Delete aliases API + if (action.mustExist() != null) { + // if must_exist is false, we should make the remove action execute silently, + // so we return the original specified aliases to avoid AliasesNotFoundException + if (!action.mustExist()) { + return action.aliases(); + } + + // if there is any non-existing aliases specified in the request and must_exist is true, throw exception in advance + if (finalAliases.isEmpty()) { + throw new AliasesNotFoundException(action.aliases()); + } + String[] nonExistingAliases = Arrays.stream(action.aliases()) + .filter(originalAlias -> finalAliases.stream().noneMatch(finalAlias -> Regex.simpleMatch(originalAlias, finalAlias))) + .toArray(String[]::new); + if (nonExistingAliases.length != 0) { + throw new AliasesNotFoundException(nonExistingAliases); + } + } return finalAliases.toArray(new String[0]); } else { // for ADD and REMOVE_INDEX we just return the current aliases diff --git a/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java b/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java index 47c6cad04343e..dac6ecaa13781 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/AliasAction.java @@ -35,6 +35,7 @@ import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; import org.opensearch.common.Nullable; import org.opensearch.core.common.Strings; +import org.opensearch.rest.action.admin.indices.AliasesNotFoundException; /** * Individual operation to perform on the cluster state as part of an {@link IndicesAliasesRequest}. 
@@ -225,8 +226,8 @@ boolean removeIndex() { @Override boolean apply(NewAliasValidator aliasValidator, Metadata.Builder metadata, IndexMetadata index) { if (false == index.getAliases().containsKey(alias)) { - if (mustExist != null && mustExist.booleanValue()) { - throw new IllegalArgumentException("required alias [" + alias + "] does not exist"); + if (mustExist != null && mustExist) { + throw new AliasesNotFoundException(alias); } return false; } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java b/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java index 8ba8d226715ed..d463782a70506 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/alias/AliasActionsTests.java @@ -241,6 +241,7 @@ public void testParseRemove() throws IOException { String[] indices = generateRandomStringArray(10, 5, false, false); String[] aliases = generateRandomStringArray(10, 5, false, false); XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent()); + boolean mustExist = randomBoolean(); b.startObject(); { b.startObject("remove"); @@ -255,6 +256,9 @@ public void testParseRemove() throws IOException { } else { b.field("alias", aliases[0]); } + if (mustExist) { + b.field("must_exist", true); + } } b.endObject(); } @@ -265,6 +269,9 @@ public void testParseRemove() throws IOException { assertEquals(AliasActions.Type.REMOVE, action.actionType()); assertThat(action.indices(), equalTo(indices)); assertThat(action.aliases(), equalTo(aliases)); + if (mustExist) { + assertThat(action.mustExist(), equalTo(true)); + } } } diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index bf66f577e182b..9fb4551f106ec 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -40,6 +40,7 @@ import org.opensearch.common.util.set.Sets; import org.opensearch.core.index.Index; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.rest.action.admin.indices.AliasesNotFoundException; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.VersionUtils; @@ -164,11 +165,11 @@ public void testMustExist() { // Show that removing non-existing alias with mustExist == true fails final ClusterState finalCS = after; - final IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, + final AliasesNotFoundException iae = expectThrows( + AliasesNotFoundException.class, () -> service.applyAliasActions(finalCS, singletonList(new AliasAction.Remove(index, "test_2", true))) ); - assertThat(iae.getMessage(), containsString("required alias [test_2] does not exist")); + assertThat(iae.getMessage(), containsString("aliases [test_2] missing")); } public void testMultipleIndices() { diff --git a/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java b/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java index ce862d2b69d8f..fb9db9a578b4a 100644 --- a/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java +++ b/test/framework/src/main/java/org/opensearch/index/alias/RandomAliasActionsGenerator.java @@ 
-102,6 +102,11 @@ public static AliasActions randomAliasAction(boolean useStringAsFilter) { action.isHidden(randomBoolean()); } } + if (action.actionType() == AliasActions.Type.REMOVE) { + if (randomBoolean()) { + action.mustExist(randomBoolean()); + } + } return action; } From 3f5ec64bc296d1bd4e27d66ae14e9f65a1f29d81 Mon Sep 17 00:00:00 2001 From: Peter Nied <petern@amazon.com> Date: Tue, 9 Jan 2024 10:19:21 -0600 Subject: [PATCH 067/178] Bump peternied/required-approval from v1.2 to v1.3 (#11820) * Switched to more reliable OpenSearch Lucene snapshot location - Related https://github.com/opensearch-project/opensearch-build/issues/3874 Signed-off-by: Peter Nied <petern@amazon.com> Signed-off-by: Peter Nied <peternied@hotmail.com> * Changelog entry Signed-off-by: Peter Nied <petern@amazon.com> Signed-off-by: Peter Nied <peternied@hotmail.com> * Use required-approval@main * Trigger on any review Signed-off-by: Peter Nied <peternied@hotmail.com> * Snap to version v1.3 Signed-off-by: Peter Nied <peternied@hotmail.com> --------- Signed-off-by: Peter Nied <petern@amazon.com> Signed-off-by: Peter Nied <peternied@hotmail.com> --- .github/workflows/maintainer-approval.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml index 2f87afd372d90..34e8f57cc1878 100644 --- a/.github/workflows/maintainer-approval.yml +++ b/.github/workflows/maintainer-approval.yml @@ -2,7 +2,6 @@ name: Maintainers approval on: pull_request_review: - types: [submitted] jobs: maintainer-approved-check: @@ -26,7 +25,7 @@ jobs: return maintainersResponse.data.map(item => item.login).join(', '); - - uses: peternied/required-approval@v1.2 + - uses: peternied/required-approval@v1.3 with: token: ${{ secrets.GITHUB_TOKEN }} min-required: 1 From 71f1fabe149fd0777edf44502ace4a8f0911feeb Mon Sep 17 00:00:00 2001 From: Gagan Juneja <gagandeepjuneja@gmail.com> Date: Tue, 9 Jan 2024 22:54:02 +0530 Subject: [PATCH 068/178] Add support for conditional Transient header propagation (#11490) * Clear transient header from system context Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Clear transient header from system context Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Adds changelog Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Update CHANGELOG.md Co-authored-by: Andriy Redko <drreta@gmail.com> Signed-off-by: Gagan Juneja <gagandeepjuneja@gmail.com> * Adds unit tests Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Refactor code Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Refactor code Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Refactor code Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Supress warning Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Refactor code Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> --------- Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> Signed-off-by: Gagan Juneja <gagandeepjuneja@gmail.com> Co-authored-by: Gagan Juneja <gjjuneja@amazon.com> Co-authored-by: Andriy Redko <drreta@gmail.com> --- CHANGELOG.md | 1 + .../common/util/concurrent/ThreadContext.java | 24 ++++--- .../ThreadContextStatePropagator.java | 30 ++++++++- .../TaskThreadContextStatePropagator.java | 13 ++++ ...hreadContextBasedTracerContextStorage.java | 19 +++++- .../transport/TransportService.java | 17 ++--- .../util/concurrent/ThreadContextTests.java | 67 +++++++++++++++++++ ...TaskThreadContextStatePropagatorTests.java | 34 ++++++++++ ...ContextBasedTracerContextStorageTests.java | 16 
+++++ 9 files changed, 193 insertions(+), 28 deletions(-) create mode 100644 server/src/test/java/org/opensearch/tasks/TaskThreadContextStatePropagatorTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 45ab53b0d4246..6da048c65d8d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -214,6 +214,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) - Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) - Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711)) +- Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490)) ### Security diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java index 3da21a6777456..6580b0e0085ef 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java @@ -161,7 +161,7 @@ public StoredContext stashContext() { ); } - final Map<String, Object> transientHeaders = propagateTransients(context.transientHeaders); + final Map<String, Object> transientHeaders = propagateTransients(context.transientHeaders, context.isSystemContext); if (!transientHeaders.isEmpty()) { threadContextStruct = threadContextStruct.putTransient(transientHeaders); } @@ -182,7 +182,7 @@ public StoredContext stashContext() { public Writeable captureAsWriteable() { final ThreadContextStruct context = threadLocal.get(); return out -> { - final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders); + final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders, context.isSystemContext); context.writeTo(out, defaultHeader, propagatedHeaders); }; } @@ -245,7 +245,7 @@ public StoredContext newStoredContext(boolean preserveResponseHeaders, Collectio final Map<String, Object> newTransientHeaders = new HashMap<>(originalContext.transientHeaders); boolean transientHeadersModified = false; - final Map<String, Object> transientHeaders = propagateTransients(originalContext.transientHeaders); + final Map<String, Object> transientHeaders = propagateTransients(originalContext.transientHeaders, originalContext.isSystemContext); if (!transientHeaders.isEmpty()) { newTransientHeaders.putAll(transientHeaders); transientHeadersModified = true; @@ -322,7 +322,7 @@ public Supplier<StoredContext> wrapRestorable(StoredContext storedContext) { @Override public void writeTo(StreamOutput out) throws IOException { final ThreadContextStruct context = threadLocal.get(); - final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders); + final Map<String, String> propagatedHeaders = propagateHeaders(context.transientHeaders, context.isSystemContext); context.writeTo(out, defaultHeader, propagatedHeaders); } @@ -534,7 +534,7 @@ boolean isDefaultContext() { * by the system itself rather than by a user action. 
*/ public void markAsSystemContext() { - threadLocal.set(threadLocal.get().setSystemContext()); + threadLocal.set(threadLocal.get().setSystemContext(propagators)); } /** @@ -573,15 +573,15 @@ public static Map<String, String> buildDefaultHeaders(Settings settings) { } } - private Map<String, Object> propagateTransients(Map<String, Object> source) { + private Map<String, Object> propagateTransients(Map<String, Object> source, boolean isSystemContext) { final Map<String, Object> transients = new HashMap<>(); - propagators.forEach(p -> transients.putAll(p.transients(source))); + propagators.forEach(p -> transients.putAll(p.transients(source, isSystemContext))); return transients; } - private Map<String, String> propagateHeaders(Map<String, Object> source) { + private Map<String, String> propagateHeaders(Map<String, Object> source, boolean isSystemContext) { final Map<String, String> headers = new HashMap<>(); - propagators.forEach(p -> headers.putAll(p.headers(source))); + propagators.forEach(p -> headers.putAll(p.headers(source, isSystemContext))); return headers; } @@ -603,11 +603,13 @@ private static final class ThreadContextStruct { // saving current warning headers' size not to recalculate the size with every new warning header private final long warningHeadersSize; - private ThreadContextStruct setSystemContext() { + private ThreadContextStruct setSystemContext(final List<ThreadContextStatePropagator> propagators) { if (isSystemContext) { return this; } - return new ThreadContextStruct(requestHeaders, responseHeaders, transientHeaders, persistentHeaders, true); + final Map<String, Object> transients = new HashMap<>(); + propagators.forEach(p -> transients.putAll(p.transients(transientHeaders, true))); + return new ThreadContextStruct(requestHeaders, responseHeaders, transients, persistentHeaders, true); } private ThreadContextStruct( diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java index dac70b0e8124e..e8c12ae13d5eb 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContextStatePropagator.java @@ -22,15 +22,41 @@ public interface ThreadContextStatePropagator { /** * Returns the list of transient headers that needs to be propagated from current context to new thread context. - * @param source current context transient headers + * + * @param source current context transient headers * @return the list of transient headers that needs to be propagated from current context to new thread context */ + @Deprecated(since = "2.12.0", forRemoval = true) Map<String, Object> transients(Map<String, Object> source); + /** + * Returns the list of transient headers that needs to be propagated from current context to new thread context. + * + * @param source current context transient headers + * @param isSystemContext if the propagation is for system context. + * @return the list of transient headers that needs to be propagated from current context to new thread context + */ + default Map<String, Object> transients(Map<String, Object> source, boolean isSystemContext) { + return transients(source); + }; + /** * Returns the list of request headers that needs to be propagated from current context to request. 
- * @param source current context headers + * + * @param source current context headers * @return the list of request headers that needs to be propagated from current context to request */ + @Deprecated(since = "2.12.0", forRemoval = true) Map<String, String> headers(Map<String, Object> source); + + /** + * Returns the list of request headers that needs to be propagated from current context to request. + * + * @param source current context headers + * @param isSystemContext if the propagation is for system context. + * @return the list of request headers that needs to be propagated from current context to request + */ + default Map<String, String> headers(Map<String, Object> source, boolean isSystemContext) { + return headers(source); + } } diff --git a/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java b/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java index ed111b34f048f..99559e45aaaee 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java +++ b/server/src/main/java/org/opensearch/tasks/TaskThreadContextStatePropagator.java @@ -20,7 +20,9 @@ * Propagates TASK_ID across thread contexts */ public class TaskThreadContextStatePropagator implements ThreadContextStatePropagator { + @Override + @SuppressWarnings("removal") public Map<String, Object> transients(Map<String, Object> source) { final Map<String, Object> transients = new HashMap<>(); @@ -32,7 +34,18 @@ public Map<String, Object> transients(Map<String, Object> source) { } @Override + public Map<String, Object> transients(Map<String, Object> source, boolean isSystemContext) { + return transients(source); + } + + @Override + @SuppressWarnings("removal") public Map<String, String> headers(Map<String, Object> source) { return Collections.emptyMap(); } + + @Override + public Map<String, String> headers(Map<String, Object> source, boolean isSystemContext) { + return headers(source); + } } diff --git a/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java b/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java index 863f56d9fbe94..908164d1935a7 100644 --- a/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java +++ b/server/src/main/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorage.java @@ -12,6 +12,7 @@ import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.common.util.concurrent.ThreadContextStatePropagator; +import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -50,20 +51,29 @@ public void put(String key, Span span) { } @Override + @SuppressWarnings("removal") public Map<String, Object> transients(Map<String, Object> source) { final Map<String, Object> transients = new HashMap<>(); - if (source.containsKey(CURRENT_SPAN)) { final SpanReference current = (SpanReference) source.get(CURRENT_SPAN); if (current != null) { transients.put(CURRENT_SPAN, new SpanReference(current.getSpan())); } } - return transients; } @Override + public Map<String, Object> transients(Map<String, Object> source, boolean isSystemContext) { + if (isSystemContext == true) { + return Collections.emptyMap(); + } else { + return transients(source); + } + } + + @Override + @SuppressWarnings("removal") public Map<String, String> headers(Map<String, Object> source) { final Map<String, String> headers = new HashMap<>(); @@ -77,6 +87,11 @@ public 
Map<String, String> headers(Map<String, Object> source) { return headers; } + @Override + public Map<String, String> headers(Map<String, Object> source, boolean isSystemContext) { + return headers(source); + } + Span getCurrentSpan(String key) { SpanReference currentSpanRef = threadContext.getTransient(key); return (currentSpanRef == null) ? null : currentSpanRef.getSpan(); diff --git a/server/src/main/java/org/opensearch/transport/TransportService.java b/server/src/main/java/org/opensearch/transport/TransportService.java index a1697b1898eeb..d50266d8c9e4a 100644 --- a/server/src/main/java/org/opensearch/transport/TransportService.java +++ b/server/src/main/java/org/opensearch/transport/TransportService.java @@ -868,19 +868,10 @@ public final <T extends TransportResponse> void sendRequest( final TransportRequestOptions options, final TransportResponseHandler<T> handler ) { - if (connection == localNodeConnection) { - // See please https://github.com/opensearch-project/OpenSearch/issues/10291 - sendRequestAsync(connection, action, request, options, handler); - } else { - final Span span = tracer.startSpan(SpanBuilder.from(action, connection)); - try (SpanScope spanScope = tracer.withSpanInScope(span)) { - TransportResponseHandler<T> traceableTransportResponseHandler = TraceableTransportResponseHandler.create( - handler, - span, - tracer - ); - sendRequestAsync(connection, action, request, options, traceableTransportResponseHandler); - } + final Span span = tracer.startSpan(SpanBuilder.from(action, connection)); + try (SpanScope spanScope = tracer.withSpanInScope(span)) { + TransportResponseHandler<T> traceableTransportResponseHandler = TraceableTransportResponseHandler.create(handler, span, tracer); + sendRequestAsync(connection, action, request, options, traceableTransportResponseHandler); } } diff --git a/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java b/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java index a0531c76bf897..10669ca1a805b 100644 --- a/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java +++ b/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java @@ -44,6 +44,8 @@ import java.util.Map; import java.util.function.Supplier; +import org.mockito.Mockito; + import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -740,6 +742,71 @@ public void testMarkAsSystemContext() throws IOException { assertFalse(threadContext.isSystemContext()); } + public void testSystemContextWithPropagator() { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + Map<String, Object> transientHeaderMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, Object> transientHeaderTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, Object> headerMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, String> headerTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + ThreadContext threadContext = new ThreadContext(build); + ThreadContextStatePropagator mockPropagator = Mockito.mock(ThreadContextStatePropagator.class); + Mockito.when(mockPropagator.transients(transientHeaderMap, true)).thenReturn(Collections.emptyMap()); + Mockito.when(mockPropagator.transients(transientHeaderMap, 
false)).thenReturn(transientHeaderTransformedMap); + + Mockito.when(mockPropagator.headers(headerMap, true)).thenReturn(headerTransformedMap); + Mockito.when(mockPropagator.headers(headerMap, false)).thenReturn(headerTransformedMap); + threadContext.registerThreadContextStatePropagator(mockPropagator); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("test_transient_propagation_key", 1); + assertEquals(Integer.valueOf(1), threadContext.getTransient("test_transient_propagation_key")); + assertEquals("bar", threadContext.getHeader("foo")); + try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { + threadContext.markAsSystemContext(); + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("test_transient_propagation_key")); + assertEquals("1", threadContext.getHeader("default")); + } + + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals(Integer.valueOf(1), threadContext.getTransient("test_transient_propagation_key")); + assertEquals("1", threadContext.getHeader("default")); + } + + public void testSerializeSystemContext() throws IOException { + Settings build = Settings.builder().put("request.headers.default", "1").build(); + Map<String, Object> transientHeaderMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, Object> transientHeaderTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, Object> headerMap = Collections.singletonMap("test_transient_propagation_key", "test"); + Map<String, String> headerTransformedMap = Collections.singletonMap("test_transient_propagation_key", "test"); + ThreadContext threadContext = new ThreadContext(build); + ThreadContextStatePropagator mockPropagator = Mockito.mock(ThreadContextStatePropagator.class); + Mockito.when(mockPropagator.transients(transientHeaderMap, true)).thenReturn(Collections.emptyMap()); + Mockito.when(mockPropagator.transients(transientHeaderMap, false)).thenReturn(transientHeaderTransformedMap); + + Mockito.when(mockPropagator.headers(headerMap, true)).thenReturn(headerTransformedMap); + Mockito.when(mockPropagator.headers(headerMap, false)).thenReturn(headerTransformedMap); + threadContext.registerThreadContextStatePropagator(mockPropagator); + threadContext.putHeader("foo", "bar"); + threadContext.putTransient("test_transient_propagation_key", "test"); + BytesStreamOutput out = new BytesStreamOutput(); + BytesStreamOutput outFromSystemContext = new BytesStreamOutput(); + threadContext.writeTo(out); + try (ThreadContext.StoredContext ctx = threadContext.stashContext()) { + assertEquals("test", threadContext.getTransient("test_transient_propagation_key")); + threadContext.markAsSystemContext(); + threadContext.writeTo(outFromSystemContext); + assertNull(threadContext.getHeader("foo")); + assertNull(threadContext.getTransient("test_transient_propagation_key")); + threadContext.readHeaders(outFromSystemContext.bytes().streamInput()); + assertNull(threadContext.getHeader("test_transient_propagation_key")); + } + assertEquals("test", threadContext.getTransient("test_transient_propagation_key")); + threadContext.readHeaders(out.bytes().streamInput()); + assertEquals("bar", threadContext.getHeader("foo")); + assertEquals("test", threadContext.getHeader("test_transient_propagation_key")); + assertEquals("1", threadContext.getHeader("default")); + } + public void testPutHeaders() { Settings build = Settings.builder().put("request.headers.default", "1").build(); ThreadContext 
threadContext = new ThreadContext(build); diff --git a/server/src/test/java/org/opensearch/tasks/TaskThreadContextStatePropagatorTests.java b/server/src/test/java/org/opensearch/tasks/TaskThreadContextStatePropagatorTests.java new file mode 100644 index 0000000000000..bfa0d566aabd7 --- /dev/null +++ b/server/src/test/java/org/opensearch/tasks/TaskThreadContextStatePropagatorTests.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.tasks; + +import org.opensearch.test.OpenSearchTestCase; + +import java.util.HashMap; +import java.util.Map; + +import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; + +public class TaskThreadContextStatePropagatorTests extends OpenSearchTestCase { + private final TaskThreadContextStatePropagator taskThreadContextStatePropagator = new TaskThreadContextStatePropagator(); + + public void testTransient() { + Map<String, Object> transientHeader = new HashMap<>(); + transientHeader.put(TASK_ID, "t_1"); + Map<String, Object> transientPropagatedHeader = taskThreadContextStatePropagator.transients(transientHeader, false); + assertEquals("t_1", transientPropagatedHeader.get(TASK_ID)); + } + + public void testTransientForSystemContext() { + Map<String, Object> transientHeader = new HashMap<>(); + transientHeader.put(TASK_ID, "t_1"); + Map<String, Object> transientPropagatedHeader = taskThreadContextStatePropagator.transients(transientHeader, true); + assertEquals("t_1", transientPropagatedHeader.get(TASK_ID)); + } +} diff --git a/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java b/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java index ee816aa5f596d..bf11bcaf39a96 100644 --- a/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java +++ b/server/src/test/java/org/opensearch/telemetry/tracing/ThreadContextBasedTracerContextStorageTests.java @@ -252,4 +252,20 @@ public void run() { assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue()))); assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue())); } + + public void testSpanNotPropagatedToChildSystemThreadContext() { + final Span span = tracer.startSpan(SpanCreationContext.internal().name("test")); + + try (SpanScope scope = tracer.withSpanInScope(span)) { + try (StoredContext ignored = threadContext.stashContext()) { + assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue()))); + assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(span)); + threadContext.markAsSystemContext(); + assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue())); + } + } + + assertThat(threadContext.getTransient(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(not(nullValue()))); + assertThat(threadContextStorage.get(ThreadContextBasedTracerContextStorage.CURRENT_SPAN), is(nullValue())); + } } From 6e2e72ba310409f7009e3801b1fb49c9823f5af1 Mon Sep 17 00:00:00 2001 From: Vikas Bansal <43470111+vikasvb90@users.noreply.github.com> Date: Tue, 9 Jan 2024 23:22:00 +0530 Subject: [PATCH 069/178] Fixed ingest pipeline script issue 
(#11725) Signed-off-by: vikasvb90 <vikasvb@amazon.com> --- .../ingest/common/ScriptProcessor.java | 7 +- .../ingest/common/ScriptProcessorTests.java | 12 +++ .../test/ingest/190_script_processor.yml | 76 +++++++++++++++++++ .../rest-api-spec/test/ingest/90_simulate.yml | 45 +++++++++++ .../org/opensearch/ingest/IngestDocument.java | 2 + .../ingest/IngestDocumentTests.java | 6 ++ 6 files changed, 146 insertions(+), 2 deletions(-) diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java index cc8889af27621..d1b4a0961b7bd 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/ScriptProcessor.java @@ -102,8 +102,11 @@ public IngestDocument execute(IngestDocument document) { } else { ingestScript = precompiledIngestScript; } - ingestScript.execute(document.getSourceAndMetadata()); - CollectionUtils.ensureNoSelfReferences(document.getSourceAndMetadata(), "ingest script"); + IngestDocument mutableDocument = new IngestDocument(document); + ingestScript.execute(mutableDocument.getSourceAndMetadata()); + CollectionUtils.ensureNoSelfReferences(mutableDocument.getSourceAndMetadata(), "ingest script"); + document.getSourceAndMetadata().clear(); + document.getSourceAndMetadata().putAll(mutableDocument.getSourceAndMetadata()); return document; } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java index 96d9be75c4ab7..e900458e361ce 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/ScriptProcessorTests.java @@ -105,4 +105,16 @@ private void assertIngestDocument(IngestDocument ingestDocument) { int bytesTotal = ingestDocument.getFieldValue("bytes_in", Integer.class) + ingestDocument.getFieldValue("bytes_out", Integer.class); assertThat(ingestDocument.getSourceAndMetadata().get("bytes_total"), is(bytesTotal)); } + + public void testScriptingWithSelfReferencingSourceMetadata() { + ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), null, script, null, scriptService); + IngestDocument originalIngestDocument = randomDocument(); + String index = originalIngestDocument.getSourceAndMetadata().get(IngestDocument.Metadata.INDEX.getFieldName()).toString(); + String id = originalIngestDocument.getSourceAndMetadata().get(IngestDocument.Metadata.ID.getFieldName()).toString(); + Map<String, Object> sourceMetadata = originalIngestDocument.getSourceAndMetadata(); + originalIngestDocument.getSourceAndMetadata().put("_source", sourceMetadata); + IngestDocument ingestDocument = new IngestDocument(index, id, null, null, null, originalIngestDocument.getSourceAndMetadata()); + expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); + } + } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml index 3230fb37b43f7..a66f02d6b6a6d 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml +++ 
b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml @@ -202,3 +202,79 @@ teardown: id: 1 - match: { _source.source_field: "foo%20bar" } - match: { _source.target_field: "foo bar" } + +--- +"Test self referencing source with ignore failure": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.foo['foo']=ctx.foo;ctx['test-field']='test-value'", + "ignore_failure": true + } + }, + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "fooBar", foo: {foo: "bar"}} + + - do: + get: + index: test + id: 1 + - match: { _source.source_field: "fooBar" } + - match: { _source.target_field: "FOOBAR"} + - match: { _source.test-field: null} + +--- +"Test self referencing source without ignoring failure": + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "script" : { + "lang": "painless", + "source" : "ctx.foo['foo']=ctx.foo;ctx['test-field']='test-value'" + } + }, + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.source_field)" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: bad_request + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: {source_field: "fooBar", foo: {foo: "bar"}} + - match: { error.root_cause.0.type: "illegal_argument_exception" } + - match: { error.root_cause.0.reason: "Iterable object is self-referencing itself (ingest script)" } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml index 7c073739f6a1f..edd649a310d42 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/90_simulate.yml @@ -1113,3 +1113,48 @@ teardown: - match: { status: 400 } - match: { error.root_cause.0.type: "illegal_argument_exception" } - match: { error.root_cause.0.reason: "Failed to parse parameter [_if_primary_term], only int or long is accepted" } + +--- +"Test simulate with pipeline with ignore failure and cyclic field assignments in script": + - do: + ingest.simulate: + verbose: true + body: > + { + "pipeline": { + "description": "_description", + "processors": [ + { + "script" : { + "ignore_failure" : true, + "lang": "painless", + "source": "ctx.foo['foo']=ctx.foo;ctx.tag='recursive'" + } + }, + { + "script" : { + "lang": "painless", + "source" : "ctx.target_field = Processors.uppercase(ctx.foo.foo)" + } + } + ] + }, + "docs": [ + { + "_source": { + "foo": { + "foo": "bar" + } + } + } + ] + } + - length: { docs: 1 } + - length: { docs.0.processor_results: 2 } + - match: { docs.0.processor_results.0.status: "error_ignored" } + - match: { docs.0.processor_results.0.ignored_error.error.type: "illegal_argument_exception" } + - match: { docs.0.processor_results.0.doc._source.tag: null } + - match: { docs.0.processor_results.1.doc._source.target_field: "BAR" } + - match: { docs.0.processor_results.1.doc._source.foo.foo: "bar" } + - match: { docs.0.processor_results.1.status: "success" } + - match: { 
docs.0.processor_results.1.processor_type: "script" } diff --git a/server/src/main/java/org/opensearch/ingest/IngestDocument.java b/server/src/main/java/org/opensearch/ingest/IngestDocument.java index e0de0a9488ad9..10e9e64db561e 100644 --- a/server/src/main/java/org/opensearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/opensearch/ingest/IngestDocument.java @@ -33,6 +33,7 @@ package org.opensearch.ingest; import org.opensearch.core.common.Strings; +import org.opensearch.core.common.util.CollectionUtils; import org.opensearch.index.VersionType; import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.IndexFieldMapper; @@ -752,6 +753,7 @@ public Map<String, Object> getSourceAndMetadata() { @SuppressWarnings("unchecked") public static <K, V> Map<K, V> deepCopyMap(Map<K, V> source) { + CollectionUtils.ensureNoSelfReferences(source, "IngestDocument: Self reference present in object."); return (Map<K, V>) deepCopy(source); } diff --git a/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java index 8358dadf9cc3a..be035bc6ef7ea 100644 --- a/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/opensearch/ingest/IngestDocumentTests.java @@ -95,6 +95,12 @@ public void setTestIngestDocument() { ingestDocument = new IngestDocument("index", "id", null, null, null, document); } + public void testSelfReferencingSource() { + Map<String, Object> value = new HashMap<>(); + value.put("foo", value); + expectThrows(IllegalArgumentException.class, () -> IngestDocument.deepCopyMap(value)); + } + public void testSimpleGetFieldValue() { assertThat(ingestDocument.getFieldValue("foo", String.class), equalTo("bar")); assertThat(ingestDocument.getFieldValue("int", Integer.class), equalTo(123)); From ebda9639f4ca98a2181bdcb2c43f82224b1d2a34 Mon Sep 17 00:00:00 2001 From: Sagar <99425694+sgup432@users.noreply.github.com> Date: Tue, 9 Jan 2024 10:57:52 -0800 Subject: [PATCH 070/178] [Tiered caching] Framework changes (#10753) * [Tiered caching] Framework changes Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Added javadoc for new files/packages Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Added changelog Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Fixing javadoc warnings Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Addressing comments Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Addressing additional minor comments Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Moving non null check to builder for OS onHeapCache Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Adding package-info for new packages Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Removing service and adding different cache interfaces along with event listener support Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Fixing gradle missingDoc issue Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Changing listener logic, removing tiered cache integration with IRC Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Adding opensearch.internal tag for LoadAwareCacheLoader Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Fixing thread safety issue Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Remove compute function and event listener logic change for TieredCache 
Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Making Cache.compute function private Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Adding javadoc and more test for cache.put Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Adding write locks to refresh API as well Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Removing unwanted EventType class and refactoring one UT Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> * Removing TieredCache interface Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> --------- Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> Signed-off-by: Sagar <99425694+sgup432@users.noreply.github.com> --- CHANGELOG.md | 2 + .../org/opensearch/common/cache/Cache.java | 112 +-- .../org/opensearch/common/cache/ICache.java | 34 + .../common/cache/LoadAwareCacheLoader.java | 20 + .../cache/store/OpenSearchOnHeapCache.java | 128 +++ .../common/cache/store/StoreAwareCache.java | 23 + .../StoreAwareCacheRemovalNotification.java | 33 + .../cache/store/StoreAwareCacheValue.java | 35 + .../builders/StoreAwareCacheBuilder.java | 73 ++ .../cache/store/builders/package-info.java | 12 + .../cache/store/enums/CacheStoreType.java | 20 + .../cache/store/enums/package-info.java | 10 + .../StoreAwareCacheEventListener.java | 30 + .../cache/store/listeners/package-info.java | 10 + .../common/cache/store/package-info.java | 10 + .../cache/tier/TieredSpilloverCache.java | 268 ++++++ .../common/cache/tier/package-info.java | 10 + .../indices/IndicesRequestCache.java | 6 +- .../cache/tier/TieredSpilloverCacheTests.java | 786 ++++++++++++++++++ 19 files changed, 1567 insertions(+), 55 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/cache/ICache.java create mode 100644 server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/builders/package-info.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java create mode 100644 server/src/main/java/org/opensearch/common/cache/store/package-info.java create mode 100644 server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java create mode 100644 server/src/main/java/org/opensearch/common/cache/tier/package-info.java create mode 100644 server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 6da048c65d8d0..d23a439c35d08 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -100,6 +100,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add 
search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255)) - Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) - Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670)) +- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753] + (https://github.com/opensearch-project/OpenSearch/pull/10753)) - [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853)) - Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247)) - Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248)) diff --git a/server/src/main/java/org/opensearch/common/cache/Cache.java b/server/src/main/java/org/opensearch/common/cache/Cache.java index 30e7c014a2ec0..d8aa4e93735e6 100644 --- a/server/src/main/java/org/opensearch/common/cache/Cache.java +++ b/server/src/main/java/org/opensearch/common/cache/Cache.java @@ -424,68 +424,74 @@ public V computeIfAbsent(K key, CacheLoader<K, V> loader) throws ExecutionExcept } }); if (value == null) { - // we need to synchronize loading of a value for a given key; however, holding the segment lock while - // invoking load can lead to deadlock against another thread due to dependent key loading; therefore, we - // need a mechanism to ensure that load is invoked at most once, but we are not invoking load while holding - // the segment lock; to do this, we atomically put a future in the map that can load the value, and then - // get the value from this future on the thread that won the race to place the future into the segment map - CacheSegment<K, V> segment = getCacheSegment(key); - CompletableFuture<Entry<K, V>> future; - CompletableFuture<Entry<K, V>> completableFuture = new CompletableFuture<>(); + value = compute(key, loader); + } + return value; + } - try (ReleasableLock ignored = segment.writeLock.acquire()) { - future = segment.map.putIfAbsent(key, completableFuture); - } + private V compute(K key, CacheLoader<K, V> loader) throws ExecutionException { + long now = now(); + // we need to synchronize loading of a value for a given key; however, holding the segment lock while + // invoking load can lead to deadlock against another thread due to dependent key loading; therefore, we + // need a mechanism to ensure that load is invoked at most once, but we are not invoking load while holding + // the segment lock; to do this, we atomically put a future in the map that can load the value, and then + // get the value from this future on the thread that won the race to place the future into the segment map + CacheSegment<K, V> segment = getCacheSegment(key); + CompletableFuture<Entry<K, V>> future; + CompletableFuture<Entry<K, V>> completableFuture = new CompletableFuture<>(); - BiFunction<? super Entry<K, V>, Throwable, ? 
extends V> handler = (ok, ex) -> { - if (ok != null) { - try (ReleasableLock ignored = lruLock.acquire()) { - promote(ok, now); - } - return ok.value; - } else { - try (ReleasableLock ignored = segment.writeLock.acquire()) { - CompletableFuture<Entry<K, V>> sanity = segment.map.get(key); - if (sanity != null && sanity.isCompletedExceptionally()) { - segment.map.remove(key); - } - } - return null; - } - }; + try (ReleasableLock ignored = segment.writeLock.acquire()) { + future = segment.map.putIfAbsent(key, completableFuture); + } - CompletableFuture<V> completableValue; - if (future == null) { - future = completableFuture; - completableValue = future.handle(handler); - V loaded; - try { - loaded = loader.load(key); - } catch (Exception e) { - future.completeExceptionally(e); - throw new ExecutionException(e); - } - if (loaded == null) { - NullPointerException npe = new NullPointerException("loader returned a null value"); - future.completeExceptionally(npe); - throw new ExecutionException(npe); - } else { - future.complete(new Entry<>(key, loaded, now)); + BiFunction<? super Entry<K, V>, Throwable, ? extends V> handler = (ok, ex) -> { + if (ok != null) { + try (ReleasableLock ignored = lruLock.acquire()) { + promote(ok, now); } + return ok.value; } else { - completableValue = future.handle(handler); + try (ReleasableLock ignored = segment.writeLock.acquire()) { + CompletableFuture<Entry<K, V>> sanity = segment.map.get(key); + if (sanity != null && sanity.isCompletedExceptionally()) { + segment.map.remove(key); + } + } + return null; } + }; + CompletableFuture<V> completableValue; + if (future == null) { + future = completableFuture; + completableValue = future.handle(handler); + V loaded; try { - value = completableValue.get(); - // check to ensure the future hasn't been completed with an exception - if (future.isCompletedExceptionally()) { - future.get(); // call get to force the exception to be thrown for other concurrent callers - throw new IllegalStateException("the future was completed exceptionally but no exception was thrown"); - } - } catch (InterruptedException e) { - throw new IllegalStateException(e); + loaded = loader.load(key); + } catch (Exception e) { + future.completeExceptionally(e); + throw new ExecutionException(e); } + if (loaded == null) { + NullPointerException npe = new NullPointerException("loader returned a null value"); + future.completeExceptionally(npe); + throw new ExecutionException(npe); + } else { + future.complete(new Entry<>(key, loaded, now)); + } + } else { + completableValue = future.handle(handler); + } + V value; + try { + value = completableValue.get(); + // check to ensure the future hasn't been completed with an exception + if (future.isCompletedExceptionally()) { + future.get(); // call get to force the exception to be thrown for other concurrent callers + throw new IllegalStateException("the future was completed exceptionally but no exception was thrown"); + } + } catch (InterruptedException e) { + throw new IllegalStateException(e); } return value; } diff --git a/server/src/main/java/org/opensearch/common/cache/ICache.java b/server/src/main/java/org/opensearch/common/cache/ICache.java new file mode 100644 index 0000000000000..c6ea5fca1a8fe --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/ICache.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache; + +/** + * Represents a cache interface. + * @param <K> Type of key. + * @param <V> Type of value. + * + * @opensearch.experimental + */ +public interface ICache<K, V> { + V get(K key); + + void put(K key, V value); + + V computeIfAbsent(K key, LoadAwareCacheLoader<K, V> loader) throws Exception; + + void invalidate(K key); + + void invalidateAll(); + + Iterable<K> keys(); + + long count(); + + void refresh(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java b/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java new file mode 100644 index 0000000000000..57aa4aa39c782 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/LoadAwareCacheLoader.java @@ -0,0 +1,20 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache; + +/** + * Extends a cache loader with awareness of whether the data is loaded or not. + * @param <K> Type of key. + * @param <V> Type of value. + * + * @opensearch.internal + */ +public interface LoadAwareCacheLoader<K, V> extends CacheLoader<K, V> { + boolean isLoaded(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java new file mode 100644 index 0000000000000..c497c8dbb7ea9 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/OpenSearchOnHeapCache.java @@ -0,0 +1,128 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.Cache; +import org.opensearch.common.cache.CacheBuilder; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalListener; +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; + +/** + * This variant of on-heap cache uses OpenSearch custom cache implementation. + * @param <K> Type of key. + * @param <V> Type of value. 
+ * + * @opensearch.experimental + */ +public class OpenSearchOnHeapCache<K, V> implements StoreAwareCache<K, V>, RemovalListener<K, V> { + + private final Cache<K, V> cache; + + private final StoreAwareCacheEventListener<K, V> eventListener; + + public OpenSearchOnHeapCache(Builder<K, V> builder) { + CacheBuilder<K, V> cacheBuilder = CacheBuilder.<K, V>builder() + .setMaximumWeight(builder.getMaxWeightInBytes()) + .weigher(builder.getWeigher()) + .removalListener(this); + if (builder.getExpireAfterAcess() != null) { + cacheBuilder.setExpireAfterAccess(builder.getExpireAfterAcess()); + } + cache = cacheBuilder.build(); + this.eventListener = builder.getEventListener(); + } + + @Override + public V get(K key) { + V value = cache.get(key); + if (value != null) { + eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + } else { + eventListener.onMiss(key, CacheStoreType.ON_HEAP); + } + return value; + } + + @Override + public void put(K key, V value) { + cache.put(key, value); + eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader<K, V> loader) throws Exception { + V value = cache.computeIfAbsent(key, key1 -> loader.load(key)); + if (!loader.isLoaded()) { + eventListener.onHit(key, value, CacheStoreType.ON_HEAP); + } else { + eventListener.onMiss(key, CacheStoreType.ON_HEAP); + eventListener.onCached(key, value, CacheStoreType.ON_HEAP); + } + return value; + } + + @Override + public void invalidate(K key) { + cache.invalidate(key); + } + + @Override + public void invalidateAll() { + cache.invalidateAll(); + } + + @Override + public Iterable<K> keys() { + return cache.keys(); + } + + @Override + public long count() { + return cache.count(); + } + + @Override + public void refresh() { + cache.refresh(); + } + + @Override + public CacheStoreType getTierType() { + return CacheStoreType.ON_HEAP; + } + + @Override + public void onRemoval(RemovalNotification<K, V> notification) { + eventListener.onRemoval( + new StoreAwareCacheRemovalNotification<>( + notification.getKey(), + notification.getValue(), + notification.getRemovalReason(), + CacheStoreType.ON_HEAP + ) + ); + } + + /** + * Builder object + * @param <K> Type of key + * @param <V> Type of value + */ + public static class Builder<K, V> extends StoreAwareCacheBuilder<K, V> { + + @Override + public StoreAwareCache<K, V> build() { + return new OpenSearchOnHeapCache<K, V>(this); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java new file mode 100644 index 0000000000000..45ca48d94c140 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCache.java @@ -0,0 +1,23 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * Represents a cache with a specific type of store like onHeap, disk etc. + * @param <K> Type of key. + * @param <V> Type of value. 
+ * + * @opensearch.experimental + */ +public interface StoreAwareCache<K, V> extends ICache<K, V> { + CacheStoreType getTierType(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java new file mode 100644 index 0000000000000..492dbff3532a1 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheRemovalNotification.java @@ -0,0 +1,33 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.RemovalNotification; +import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * Removal notification for store aware cache. + * @param <K> Type of key. + * @param <V> Type of value. + * + * @opensearch.internal + */ +public class StoreAwareCacheRemovalNotification<K, V> extends RemovalNotification<K, V> { + private final CacheStoreType cacheStoreType; + + public StoreAwareCacheRemovalNotification(K key, V value, RemovalReason removalReason, CacheStoreType cacheStoreType) { + super(key, value, removalReason); + this.cacheStoreType = cacheStoreType; + } + + public CacheStoreType getCacheStoreType() { + return cacheStoreType; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java new file mode 100644 index 0000000000000..4fbbbbfebfaa7 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/StoreAwareCacheValue.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store; + +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * Represents a store aware cache value. + * @param <V> Type of value. + * + * @opensearch.internal + */ +public class StoreAwareCacheValue<V> { + private final V value; + private final CacheStoreType source; + + public StoreAwareCacheValue(V value, CacheStoreType source) { + this.value = value; + this.source = source; + } + + public V getValue() { + return value; + } + + public CacheStoreType getCacheStoreType() { + return source; + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java b/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java new file mode 100644 index 0000000000000..fc5aa48aae90f --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/StoreAwareCacheBuilder.java @@ -0,0 +1,73 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.cache.store.builders; + +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.unit.TimeValue; + +import java.util.function.ToLongBiFunction; + +/** + * Builder for store aware cache. + * @param <K> Type of key. + * @param <V> Type of value. + * + * @opensearch.internal + */ +public abstract class StoreAwareCacheBuilder<K, V> { + + private long maxWeightInBytes; + + private ToLongBiFunction<K, V> weigher; + + private TimeValue expireAfterAcess; + + private StoreAwareCacheEventListener<K, V> eventListener; + + public StoreAwareCacheBuilder() {} + + public StoreAwareCacheBuilder<K, V> setMaximumWeightInBytes(long sizeInBytes) { + this.maxWeightInBytes = sizeInBytes; + return this; + } + + public StoreAwareCacheBuilder<K, V> setWeigher(ToLongBiFunction<K, V> weigher) { + this.weigher = weigher; + return this; + } + + public StoreAwareCacheBuilder<K, V> setExpireAfterAccess(TimeValue expireAfterAcess) { + this.expireAfterAcess = expireAfterAcess; + return this; + } + + public StoreAwareCacheBuilder<K, V> setEventListener(StoreAwareCacheEventListener<K, V> eventListener) { + this.eventListener = eventListener; + return this; + } + + public long getMaxWeightInBytes() { + return maxWeightInBytes; + } + + public TimeValue getExpireAfterAcess() { + return expireAfterAcess; + } + + public ToLongBiFunction<K, V> getWeigher() { + return weigher; + } + + public StoreAwareCacheEventListener<K, V> getEventListener() { + return eventListener; + } + + public abstract StoreAwareCache<K, V> build(); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/builders/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/builders/package-info.java new file mode 100644 index 0000000000000..ac4590ae3bff7 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/builders/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Base package for builders. + */ +package org.opensearch.common.cache.store.builders; diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java new file mode 100644 index 0000000000000..04c0825787b66 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/enums/CacheStoreType.java @@ -0,0 +1,20 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store.enums; + +/** + * Cache store types in tiered cache. 
+ * + * @opensearch.internal + */ +public enum CacheStoreType { + + ON_HEAP, + DISK; +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java new file mode 100644 index 0000000000000..7a4e0fa7201fd --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/enums/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Package related to tiered cache enums */ +package org.opensearch.common.cache.store.enums; diff --git a/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java b/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java new file mode 100644 index 0000000000000..6d7e4b39aaf9f --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/listeners/StoreAwareCacheEventListener.java @@ -0,0 +1,30 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.store.listeners; + +import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; +import org.opensearch.common.cache.store.enums.CacheStoreType; + +/** + * This can be used to listen to tiered caching events + * @param <K> Type of key + * @param <V> Type of value + * + * @opensearch.internal + */ +public interface StoreAwareCacheEventListener<K, V> { + + void onMiss(K key, CacheStoreType cacheStoreType); + + void onRemoval(StoreAwareCacheRemovalNotification<K, V> notification); + + void onHit(K key, V value, CacheStoreType cacheStoreType); + + void onCached(K key, V value, CacheStoreType cacheStoreType); +} diff --git a/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java new file mode 100644 index 0000000000000..c3222ca3ffb62 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/listeners/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Package related to tiered cache listeners */ +package org.opensearch.common.cache.store.listeners; diff --git a/server/src/main/java/org/opensearch/common/cache/store/package-info.java b/server/src/main/java/org/opensearch/common/cache/store/package-info.java new file mode 100644 index 0000000000000..edc1ecd7d5e7a --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/store/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Base package for store aware caches. 
*/ +package org.opensearch.common.cache.store; diff --git a/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java new file mode 100644 index 0000000000000..8b432c9484aed --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/TieredSpilloverCache.java @@ -0,0 +1,268 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.cache.tier; + +import org.opensearch.common.cache.ICache; +import org.opensearch.common.cache.LoadAwareCacheLoader; +import org.opensearch.common.cache.RemovalReason; +import org.opensearch.common.cache.store.StoreAwareCache; +import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification; +import org.opensearch.common.cache.store.StoreAwareCacheValue; +import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder; +import org.opensearch.common.cache.store.enums.CacheStoreType; +import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener; +import org.opensearch.common.util.concurrent.ReleasableLock; +import org.opensearch.common.util.iterable.Iterables; + +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.locks.ReadWriteLock; +import java.util.concurrent.locks.ReentrantReadWriteLock; +import java.util.function.Function; + +/** + * This cache spills over the evicted items from the heap tier to the disk tier. All the new items are first cached on heap + * and the items evicted from on heap cache are moved to disk based cache. If disk based cache also gets full, + * then items are eventually evicted from it and removed, which will result in a cache miss. + * + * @param <K> Type of key + * @param <V> Type of value + * + * @opensearch.experimental + */ +public class TieredSpilloverCache<K, V> implements ICache<K, V>, StoreAwareCacheEventListener<K, V> { + + // TODO: Remove optional when diskCache implementation is integrated. + private final Optional<StoreAwareCache<K, V>> onDiskCache; + private final StoreAwareCache<K, V> onHeapCache; + private final StoreAwareCacheEventListener<K, V> listener; + ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); + ReleasableLock readLock = new ReleasableLock(readWriteLock.readLock()); + ReleasableLock writeLock = new ReleasableLock(readWriteLock.writeLock()); + + /** + * Maintains caching tiers in ascending order of cache latency. 
+ */ + private final List<StoreAwareCache<K, V>> cacheList; + + TieredSpilloverCache(Builder<K, V> builder) { + Objects.requireNonNull(builder.onHeapCacheBuilder, "onHeap cache builder can't be null"); + this.onHeapCache = builder.onHeapCacheBuilder.setEventListener(this).build(); + if (builder.onDiskCacheBuilder != null) { + this.onDiskCache = Optional.of(builder.onDiskCacheBuilder.setEventListener(this).build()); + } else { + this.onDiskCache = Optional.empty(); + } + this.listener = builder.listener; + this.cacheList = this.onDiskCache.map(diskTier -> Arrays.asList(this.onHeapCache, diskTier)).orElse(List.of(this.onHeapCache)); + } + + // Package private for testing + StoreAwareCache<K, V> getOnHeapCache() { + return onHeapCache; + } + + // Package private for testing + Optional<StoreAwareCache<K, V>> getOnDiskCache() { + return onDiskCache; + } + + @Override + public V get(K key) { + StoreAwareCacheValue<V> cacheValue = getValueFromTieredCache(true).apply(key); + if (cacheValue == null) { + return null; + } + return cacheValue.getValue(); + } + + @Override + public void put(K key, V value) { + try (ReleasableLock ignore = writeLock.acquire()) { + onHeapCache.put(key, value); + listener.onCached(key, value, CacheStoreType.ON_HEAP); + } + } + + @Override + public V computeIfAbsent(K key, LoadAwareCacheLoader<K, V> loader) throws Exception { + // We are skipping calling event listeners at this step as we do another get inside the computeIfAbsent below, + // where we might end up calling onMiss twice for a key not present in the onHeap cache. + // Similarly we might end up calling both onMiss and onHit for a key, in case we are receiving concurrent + // requests for the same key which require loading only once. + StoreAwareCacheValue<V> cacheValue = getValueFromTieredCache(false).apply(key); + if (cacheValue == null) { + // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside. + // This is needed as there can be many requests for the same key at the same time and we only want to load + // the value once. + V value = null; + try (ReleasableLock ignore = writeLock.acquire()) { + value = onHeapCache.computeIfAbsent(key, loader); + } + if (loader.isLoaded()) { + listener.onMiss(key, CacheStoreType.ON_HEAP); + onDiskCache.ifPresent(diskTier -> listener.onMiss(key, CacheStoreType.DISK)); + listener.onCached(key, value, CacheStoreType.ON_HEAP); + } else { + listener.onHit(key, value, CacheStoreType.ON_HEAP); + } + return value; + } + listener.onHit(key, cacheValue.getValue(), cacheValue.getCacheStoreType()); + if (cacheValue.getCacheStoreType().equals(CacheStoreType.DISK)) { + listener.onMiss(key, CacheStoreType.ON_HEAP); + } + return cacheValue.getValue(); + } + + @Override + public void invalidate(K key) { + // We are trying to invalidate the key from all caches though it would be present in only one of them. + // Doing this as we don't know where it is located. We could do a get from both and check, but that would + // also trigger a hit/miss listener event, so ignoring it for now. + try (ReleasableLock ignore = writeLock.acquire()) { + for (StoreAwareCache<K, V> storeAwareCache : cacheList) { + storeAwareCache.invalidate(key); + } + } + } + + @Override + public void invalidateAll() { + try (ReleasableLock ignore = writeLock.acquire()) { + for (StoreAwareCache<K, V> storeAwareCache : cacheList) { + storeAwareCache.invalidateAll(); + } + } + } + + /** + * Provides an iteration over both onHeap and disk keys. 
+     * @return An iterable over (onHeap + disk) keys
+     */
+    @Override
+    public Iterable<K> keys() {
+        Iterable<K> onDiskKeysIterable;
+        if (onDiskCache.isPresent()) {
+            onDiskKeysIterable = onDiskCache.get().keys();
+        } else {
+            onDiskKeysIterable = Collections::emptyIterator;
+        }
+        return Iterables.concat(onHeapCache.keys(), onDiskKeysIterable);
+    }
+
+    @Override
+    public long count() {
+        long totalCount = 0;
+        for (StoreAwareCache<K, V> storeAwareCache : cacheList) {
+            totalCount += storeAwareCache.count();
+        }
+        return totalCount;
+    }
+
+    @Override
+    public void refresh() {
+        try (ReleasableLock ignore = writeLock.acquire()) {
+            for (StoreAwareCache<K, V> storeAwareCache : cacheList) {
+                storeAwareCache.refresh();
+            }
+        }
+    }
+
+    @Override
+    public void onMiss(K key, CacheStoreType cacheStoreType) {
+        // Misses for the tiered cache are tracked by this class itself.
+    }
+
+    @Override
+    public void onRemoval(StoreAwareCacheRemovalNotification<K, V> notification) {
+        if (RemovalReason.EVICTED.equals(notification.getRemovalReason())
+            || RemovalReason.CAPACITY.equals(notification.getRemovalReason())) {
+            switch (notification.getCacheStoreType()) {
+                case ON_HEAP:
+                    try (ReleasableLock ignore = writeLock.acquire()) {
+                        onDiskCache.ifPresent(diskTier -> { diskTier.put(notification.getKey(), notification.getValue()); });
+                    }
+                    onDiskCache.ifPresent(
+                        diskTier -> listener.onCached(notification.getKey(), notification.getValue(), CacheStoreType.DISK)
+                    );
+                    break;
+                default:
+                    break;
+            }
+        }
+        listener.onRemoval(notification);
+    }
+
+    @Override
+    public void onHit(K key, V value, CacheStoreType cacheStoreType) {
+        // Hits for the tiered cache are tracked by this class itself.
+    }
+
+    @Override
+    public void onCached(K key, V value, CacheStoreType cacheStoreType) {
+        // onCached events for the tiered cache are tracked by this class itself.
+    }
+
+    private Function<K, StoreAwareCacheValue<V>> getValueFromTieredCache(boolean triggerEventListener) {
+        return key -> {
+            try (ReleasableLock ignore = readLock.acquire()) {
+                for (StoreAwareCache<K, V> storeAwareCache : cacheList) {
+                    V value = storeAwareCache.get(key);
+                    if (value != null) {
+                        if (triggerEventListener) {
+                            listener.onHit(key, value, storeAwareCache.getTierType());
+                        }
+                        return new StoreAwareCacheValue<>(value, storeAwareCache.getTierType());
+                    } else {
+                        if (triggerEventListener) {
+                            listener.onMiss(key, storeAwareCache.getTierType());
+                        }
+                    }
+                }
+            }
+            return null;
+        };
+    }
+
+    /**
+     * Builder object for the tiered spillover cache.
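+     * <p>
+     * A minimal usage sketch (the {@code onHeapBuilder}, {@code diskBuilder} and {@code listener} instances
+     * below are illustrative placeholders, not part of this class):
+     * <pre>{@code
+     * TieredSpilloverCache<String, String> cache = new TieredSpilloverCache.Builder<String, String>()
+     *     .setOnHeapCacheBuilder(onHeapBuilder) // required
+     *     .setOnDiskCacheBuilder(diskBuilder)   // optional; omit to run with the on-heap tier only
+     *     .setListener(listener)
+     *     .build();
+     * }</pre>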
+ * @param <K> Type of key + * @param <V> Type of value + */ + public static class Builder<K, V> { + private StoreAwareCacheBuilder<K, V> onHeapCacheBuilder; + private StoreAwareCacheBuilder<K, V> onDiskCacheBuilder; + private StoreAwareCacheEventListener<K, V> listener; + + public Builder() {} + + public Builder<K, V> setOnHeapCacheBuilder(StoreAwareCacheBuilder<K, V> onHeapCacheBuilder) { + this.onHeapCacheBuilder = onHeapCacheBuilder; + return this; + } + + public Builder<K, V> setOnDiskCacheBuilder(StoreAwareCacheBuilder<K, V> onDiskCacheBuilder) { + this.onDiskCacheBuilder = onDiskCacheBuilder; + return this; + } + + public Builder<K, V> setListener(StoreAwareCacheEventListener<K, V> listener) { + this.listener = listener; + return this; + } + + public TieredSpilloverCache<K, V> build() { + return new TieredSpilloverCache<>(this); + } + } +} diff --git a/server/src/main/java/org/opensearch/common/cache/tier/package-info.java b/server/src/main/java/org/opensearch/common/cache/tier/package-info.java new file mode 100644 index 0000000000000..7ad81dbe3073c --- /dev/null +++ b/server/src/main/java/org/opensearch/common/cache/tier/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** Base package for cache tier support. */ +package org.opensearch.common.cache.tier; diff --git a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java index 629cea102a8b2..4a19f8eb8714d 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java @@ -247,7 +247,7 @@ interface CacheEntity extends Accountable { * * @opensearch.internal */ - static class Key implements Accountable { + public static class Key implements Accountable { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class); public final CacheEntity entity; // use as identity equality @@ -328,6 +328,9 @@ public int hashCode() { } } + /** + * Logic to clean up in-memory cache. + */ synchronized void cleanCache() { final Set<CleanupKey> currentKeysToClean = new HashSet<>(); final Set<Object> currentFullClean = new HashSet<>(); @@ -355,7 +358,6 @@ synchronized void cleanCache() { } } } - cache.refresh(); } diff --git a/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java new file mode 100644 index 0000000000000..eb75244c6f8b1 --- /dev/null +++ b/server/src/test/java/org/opensearch/common/cache/tier/TieredSpilloverCacheTests.java @@ -0,0 +1,786 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.common.cache.tier;
+
+import org.opensearch.common.cache.LoadAwareCacheLoader;
+import org.opensearch.common.cache.RemovalReason;
+import org.opensearch.common.cache.store.OpenSearchOnHeapCache;
+import org.opensearch.common.cache.store.StoreAwareCache;
+import org.opensearch.common.cache.store.StoreAwareCacheRemovalNotification;
+import org.opensearch.common.cache.store.builders.StoreAwareCacheBuilder;
+import org.opensearch.common.cache.store.enums.CacheStoreType;
+import org.opensearch.common.cache.store.listeners.StoreAwareCacheEventListener;
+import org.opensearch.common.metrics.CounterMetric;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.ArrayList;
+import java.util.EnumMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Phaser;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+public class TieredSpilloverCacheTests extends OpenSearchTestCase {
+
+    public void testComputeIfAbsentWithoutAnyOnHeapCacheEviction() throws Exception {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            randomIntBetween(1, 4),
+            eventListener,
+            0
+        );
+        int numOfItems1 = randomIntBetween(1, onHeapCacheSize / 2 - 1);
+        List<String> keys = new ArrayList<>();
+        // Put values in cache.
+        for (int iter = 0; iter < numOfItems1; iter++) {
+            String key = UUID.randomUUID().toString();
+            keys.add(key);
+            LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader();
+            tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader);
+        }
+        assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+
+        // Try to hit cache again with some randomization.
+        int numOfItems2 = randomIntBetween(1, onHeapCacheSize / 2 - 1);
+        int cacheHit = 0;
+        int cacheMiss = 0;
+        for (int iter = 0; iter < numOfItems2; iter++) {
+            if (randomBoolean()) {
+                // Hit cache with a stored key.
+                cacheHit++;
+                int index = randomIntBetween(0, keys.size() - 1);
+                tieredSpilloverCache.computeIfAbsent(keys.get(index), getLoadAwareCacheLoader());
+            } else {
+                // Hit cache with a randomized key, which is expected to miss the cache always.
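+                // (a fresh random UUID will practically never collide with a previously stored key)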
+                tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), getLoadAwareCacheLoader());
+                cacheMiss++;
+            }
+        }
+        assertEquals(cacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
+        assertEquals(numOfItems1 + cacheMiss, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+    }
+
+    public void testComputeIfAbsentWithEvictionsFromOnHeapCache() throws Exception {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        int diskCacheSize = randomIntBetween(60, 100);
+        int totalSize = onHeapCacheSize + diskCacheSize;
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>();
+        StoreAwareCacheBuilder<String, String> cacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>().setMaximumWeightInBytes(
+            onHeapCacheSize * 50
+        ).setWeigher((k, v) -> 50); // Will support onHeapCacheSize entries.
+
+        StoreAwareCacheBuilder<String, String> diskCacheBuilder = new MockOnDiskCache.Builder<String, String>().setMaxSize(diskCacheSize)
+            .setDeliberateDelay(0);
+
+        TieredSpilloverCache<String, String> tieredSpilloverCache = new TieredSpilloverCache.Builder<String, String>()
+            .setOnHeapCacheBuilder(cacheBuilder)
+            .setOnDiskCacheBuilder(diskCacheBuilder)
+            .setListener(eventListener)
+            .build();
+
+        // Put more values in the cache than its size to cause evictions from onHeap.
+        int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize);
+        List<String> onHeapKeys = new ArrayList<>();
+        List<String> diskTierKeys = new ArrayList<>();
+        for (int iter = 0; iter < numOfItems1; iter++) {
+            String key = UUID.randomUUID().toString();
+            LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader();
+            tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader);
+        }
+        long actualDiskCacheSize = tieredSpilloverCache.getOnDiskCache().get().count();
+        assertEquals(numOfItems1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
+        assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+
+        assertEquals(
+            eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count(),
+            eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count()
+        );
+        assertEquals(actualDiskCacheSize, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count());
+
+        tieredSpilloverCache.getOnHeapCache().keys().forEach(onHeapKeys::add);
+        tieredSpilloverCache.getOnDiskCache().get().keys().forEach(diskTierKeys::add);
+
+        assertEquals(tieredSpilloverCache.getOnHeapCache().count(), onHeapKeys.size());
+        assertEquals(tieredSpilloverCache.getOnDiskCache().get().count(), diskTierKeys.size());
+
+        // Try to hit cache again with some randomization.
+        int numOfItems2 = randomIntBetween(50, 200);
+        int onHeapCacheHit = 0;
+        int diskCacheHit = 0;
+        int cacheMiss = 0;
+        for (int iter = 0; iter < numOfItems2; iter++) {
+            if (randomBoolean()) { // Hit cache with a key stored in the onHeap cache.
+                onHeapCacheHit++;
+                int index = randomIntBetween(0, onHeapKeys.size() - 1);
+                LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader();
+                tieredSpilloverCache.computeIfAbsent(onHeapKeys.get(index), loadAwareCacheLoader);
+                assertFalse(loadAwareCacheLoader.isLoaded());
+            } else { // Hit cache with a key stored in the disk cache.
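+                // (each disk-tier hit is preceded by an onHeap miss; the assertions below account for this)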
+                diskCacheHit++;
+                int index = randomIntBetween(0, diskTierKeys.size() - 1);
+                LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader();
+                tieredSpilloverCache.computeIfAbsent(diskTierKeys.get(index), loadAwareCacheLoader);
+                assertFalse(loadAwareCacheLoader.isLoaded());
+            }
+        }
+        for (int iter = 0; iter < randomIntBetween(50, 200); iter++) {
+            // Hit cache with a randomized key, which is expected to miss the cache always.
+            LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader();
+            tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader);
+            cacheMiss++;
+        }
+        // OnHeap cache misses also include disk cache hits, since a disk hit implies an onHeap miss.
+        assertEquals(numOfItems1 + cacheMiss + diskCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).missCount.count());
+        assertEquals(onHeapCacheHit, eventListener.enumMap.get(CacheStoreType.ON_HEAP).hitCount.count());
+        assertEquals(cacheMiss + numOfItems1, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count());
+        assertEquals(diskCacheHit, eventListener.enumMap.get(CacheStoreType.DISK).hitCount.count());
+    }
+
+    public void testComputeIfAbsentWithEvictionsFromBothTier() throws Exception {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100);
+        int totalSize = onHeapCacheSize + diskCacheSize;
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+
+        int numOfItems = randomIntBetween(totalSize + 1, totalSize * 3);
+        for (int iter = 0; iter < numOfItems; iter++) {
+            LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader();
+            tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), tieredCacheLoader);
+        }
+        assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0);
+        assertTrue(eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count() > 0);
+    }
+
+    public void testGetAndCount() throws Exception {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100);
+        int totalSize = onHeapCacheSize + diskCacheSize;
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+
+        int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize);
+        List<String> onHeapKeys = new ArrayList<>();
+        List<String> diskTierKeys = new ArrayList<>();
+        for (int iter = 0; iter < numOfItems1; iter++) {
+            String key = UUID.randomUUID().toString();
+            if (iter > (onHeapCacheSize - 1)) {
+                // All these are bound to go to the disk-based cache.
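+                // (which list a key lands in does not affect correctness below: get() consults both tiers)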
+                diskTierKeys.add(key);
+            } else {
+                onHeapKeys.add(key);
+            }
+            LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader();
+            tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader);
+        }
+
+        for (int iter = 0; iter < numOfItems1; iter++) {
+            if (randomBoolean()) {
+                if (randomBoolean()) {
+                    int index = randomIntBetween(0, onHeapKeys.size() - 1);
+                    assertNotNull(tieredSpilloverCache.get(onHeapKeys.get(index)));
+                } else {
+                    int index = randomIntBetween(0, diskTierKeys.size() - 1);
+                    assertNotNull(tieredSpilloverCache.get(diskTierKeys.get(index)));
+                }
+            } else {
+                assertNull(tieredSpilloverCache.get(UUID.randomUUID().toString()));
+            }
+        }
+        assertEquals(numOfItems1, tieredSpilloverCache.count());
+    }
+
+    public void testWithDiskTierNull() throws Exception {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<String, String>();
+
+        StoreAwareCacheBuilder<String, String> onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>()
+            .setMaximumWeightInBytes(onHeapCacheSize * 20)
+            .setWeigher((k, v) -> 20); // Will support up to onHeapCacheSize entries.
+        TieredSpilloverCache<String, String> tieredSpilloverCache = new TieredSpilloverCache.Builder<String, String>()
+            .setOnHeapCacheBuilder(onHeapCacheBuilder)
+            .setListener(eventListener)
+            .build();
+
+        int numOfItems = randomIntBetween(onHeapCacheSize + 1, onHeapCacheSize * 3);
+        for (int iter = 0; iter < numOfItems; iter++) {
+            LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader();
+            tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), loadAwareCacheLoader);
+        }
+        assertTrue(eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count() > 0);
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).cachedCount.count());
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).evictionsMetric.count());
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.DISK).missCount.count());
+    }
+
+    public void testPut() {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        int diskCacheSize = randomIntBetween(onHeapCacheSize + 1, 100);
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+        String key = UUID.randomUUID().toString();
+        String value = UUID.randomUUID().toString();
+        tieredSpilloverCache.put(key, value);
+        assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).cachedCount.count());
+        assertEquals(1, tieredSpilloverCache.count());
+    }
+
+    public void testPutAndVerifyNewItemsArePresentOnHeapCache() throws Exception {
+        int onHeapCacheSize = randomIntBetween(200, 400);
+        int diskCacheSize = randomIntBetween(450, 800);
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+
+        for (int i = 0; i < onHeapCacheSize; i++) {
+            tieredSpilloverCache.computeIfAbsent(UUID.randomUUID().toString(), new LoadAwareCacheLoader<>() {
+                @Override
+                public boolean isLoaded() {
+                    return false;
+                }
+
+                @Override
+                public String load(String key) throws Exception {
+                    return UUID.randomUUID().toString();
+                }
+            });
+        }
+
+        assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count());
+        assertEquals(0, tieredSpilloverCache.getOnDiskCache().get().count());
+
+        // Try to put another onHeapCacheSize worth of new items.
+        List<String> newKeyList = new ArrayList<>();
+        for (int i = 0; i < onHeapCacheSize; i++) {
+            newKeyList.add(UUID.randomUUID().toString());
+        }
+
+        for (int i = 0; i < newKeyList.size(); i++) {
+            tieredSpilloverCache.computeIfAbsent(newKeyList.get(i), new LoadAwareCacheLoader<>() {
+                @Override
+                public boolean isLoaded() {
+                    return false;
+                }
+
+                @Override
+                public String load(String key) {
+                    return UUID.randomUUID().toString();
+                }
+            });
+        }
+
+        // Verify that the new items are part of the onHeap cache.
+        List<String> actualOnHeapCacheKeys = new ArrayList<>();
+        tieredSpilloverCache.getOnHeapCache().keys().forEach(actualOnHeapCacheKeys::add);
+
+        assertEquals(newKeyList.size(), actualOnHeapCacheKeys.size());
+        for (int i = 0; i < actualOnHeapCacheKeys.size(); i++) {
+            assertTrue(newKeyList.contains(actualOnHeapCacheKeys.get(i)));
+        }
+
+        assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnHeapCache().count());
+        assertEquals(onHeapCacheSize, tieredSpilloverCache.getOnDiskCache().get().count());
+    }
+
+    public void testInvalidate() {
+        int onHeapCacheSize = 1;
+        int diskCacheSize = 10;
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+        String key = UUID.randomUUID().toString();
+        String value = UUID.randomUUID().toString();
+        // First, try to invalidate without the key being present in the cache.
+        tieredSpilloverCache.invalidate(key);
+        assertEquals(0, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count());
+
+        // Now try to invalidate with the key present in the onHeap cache.
+        tieredSpilloverCache.put(key, value);
+        tieredSpilloverCache.invalidate(key);
+        assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).invalidationMetric.count());
+        assertEquals(0, tieredSpilloverCache.count());
+
+        tieredSpilloverCache.put(key, value);
+        // Put another key/value pair so that one of the items is evicted to the disk cache.
+        String key2 = UUID.randomUUID().toString();
+        tieredSpilloverCache.put(key2, UUID.randomUUID().toString());
+        assertEquals(2, tieredSpilloverCache.count());
+        // Invalidate the older key again, which now resides in the disk tier.
+        tieredSpilloverCache.invalidate(key);
+        assertEquals(1, eventListener.enumMap.get(CacheStoreType.DISK).invalidationMetric.count());
+        assertEquals(1, tieredSpilloverCache.count());
+    }
+
+    public void testCacheKeys() throws Exception {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        int diskCacheSize = randomIntBetween(60, 100);
+        int totalSize = onHeapCacheSize + diskCacheSize;
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+        List<String> onHeapKeys = new ArrayList<>();
+        List<String> diskTierKeys = new ArrayList<>();
+        // During the first round, add onHeapCacheSize entries. These will go to the onHeap cache initially.
+        for (int i = 0; i < onHeapCacheSize; i++) {
+            String key = UUID.randomUUID().toString();
+            diskTierKeys.add(key);
+            tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader());
+        }
+        // In another round, add another onHeapCacheSize entries.
+        // These will go to the onHeap cache, while the entries from the first round are evicted to the onDisk cache.
+        for (int i = 0; i < onHeapCacheSize; i++) {
+            String key = UUID.randomUUID().toString();
+            onHeapKeys.add(key);
+            tieredSpilloverCache.computeIfAbsent(key, getLoadAwareCacheLoader());
+        }
+
+        List<String> actualOnHeapKeys = new ArrayList<>();
+        List<String> actualOnDiskKeys = new ArrayList<>();
+        Iterable<String> onHeapiterable = tieredSpilloverCache.getOnHeapCache().keys();
+        Iterable<String> onDiskiterable = tieredSpilloverCache.getOnDiskCache().get().keys();
+        onHeapiterable.iterator().forEachRemaining(actualOnHeapKeys::add);
+        onDiskiterable.iterator().forEachRemaining(actualOnDiskKeys::add);
+        for (String onHeapKey : onHeapKeys) {
+            assertTrue(actualOnHeapKeys.contains(onHeapKey));
+        }
+        for (String onDiskKey : diskTierKeys) {
+            assertTrue(actualOnDiskKeys.contains(onDiskKey));
+        }
+
+        // Testing keys(), which returns all keys.
+        List<String> actualMergedKeys = new ArrayList<>();
+        List<String> expectedMergedKeys = new ArrayList<>();
+        expectedMergedKeys.addAll(onHeapKeys);
+        expectedMergedKeys.addAll(diskTierKeys);
+
+        Iterable<String> mergedIterable = tieredSpilloverCache.keys();
+        mergedIterable.iterator().forEachRemaining(actualMergedKeys::add);
+
+        assertEquals(expectedMergedKeys.size(), actualMergedKeys.size());
+        for (String key : expectedMergedKeys) {
+            assertTrue(actualMergedKeys.contains(key));
+        }
+    }
+
+    public void testRefresh() {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        int diskCacheSize = randomIntBetween(60, 100);
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+        tieredSpilloverCache.refresh();
+    }
+
+    public void testInvalidateAll() throws Exception {
+        int onHeapCacheSize = randomIntBetween(10, 30);
+        int diskCacheSize = randomIntBetween(60, 100);
+        int totalSize = onHeapCacheSize + diskCacheSize;
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+        // Put more values in the cache than its size to cause evictions from onHeap.
+        int numOfItems1 = randomIntBetween(onHeapCacheSize + 1, totalSize);
+        List<String> onHeapKeys = new ArrayList<>();
+        List<String> diskTierKeys = new ArrayList<>();
+        for (int iter = 0; iter < numOfItems1; iter++) {
+            String key = UUID.randomUUID().toString();
+            if (iter > (onHeapCacheSize - 1)) {
+                // All these are bound to go to the disk-based cache.
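+                // (invalidateAll() below must clear both tiers, so both are deliberately populated)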
+                diskTierKeys.add(key);
+            } else {
+                onHeapKeys.add(key);
+            }
+            LoadAwareCacheLoader<String, String> tieredCacheLoader = getLoadAwareCacheLoader();
+            tieredSpilloverCache.computeIfAbsent(key, tieredCacheLoader);
+        }
+        assertEquals(numOfItems1, tieredSpilloverCache.count());
+        tieredSpilloverCache.invalidateAll();
+        assertEquals(0, tieredSpilloverCache.count());
+    }
+
+    public void testComputeIfAbsentConcurrently() throws Exception {
+        int onHeapCacheSize = randomIntBetween(100, 300);
+        int diskCacheSize = randomIntBetween(200, 400);
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+
+        TieredSpilloverCache<String, String> tieredSpilloverCache = initializeTieredSpilloverCache(
+            onHeapCacheSize,
+            diskCacheSize,
+            eventListener,
+            0
+        );
+
+        int numberOfSameKeys = randomIntBetween(10, onHeapCacheSize - 1);
+        String key = UUID.randomUUID().toString();
+        String value = UUID.randomUUID().toString();
+
+        Thread[] threads = new Thread[numberOfSameKeys];
+        Phaser phaser = new Phaser(numberOfSameKeys + 1);
+        CountDownLatch countDownLatch = new CountDownLatch(numberOfSameKeys); // To wait for all threads to finish.
+
+        List<LoadAwareCacheLoader<String, String>> loadAwareCacheLoaderList = new CopyOnWriteArrayList<>();
+
+        for (int i = 0; i < numberOfSameKeys; i++) {
+            threads[i] = new Thread(() -> {
+                try {
+                    LoadAwareCacheLoader<String, String> loadAwareCacheLoader = new LoadAwareCacheLoader() {
+                        boolean isLoaded = false;
+
+                        @Override
+                        public boolean isLoaded() {
+                            return isLoaded;
+                        }
+
+                        @Override
+                        public Object load(Object key) throws Exception {
+                            isLoaded = true;
+                            return value;
+                        }
+                    };
+                    loadAwareCacheLoaderList.add(loadAwareCacheLoader);
+                    phaser.arriveAndAwaitAdvance();
+                    tieredSpilloverCache.computeIfAbsent(key, loadAwareCacheLoader);
+                } catch (Exception e) {
+                    throw new RuntimeException(e);
+                }
+                countDownLatch.countDown();
+            });
+            threads[i].start();
+        }
+        phaser.arriveAndAwaitAdvance();
+        countDownLatch.await(); // Wait for the rest of the tasks to finish.
+        int numberOfTimesKeyLoaded = 0;
+        assertEquals(numberOfSameKeys, loadAwareCacheLoaderList.size());
+        for (int i = 0; i < loadAwareCacheLoaderList.size(); i++) {
+            LoadAwareCacheLoader<String, String> loader = loadAwareCacheLoaderList.get(i);
+            if (loader.isLoaded()) {
+                numberOfTimesKeyLoaded++;
+            }
+        }
+        assertEquals(1, numberOfTimesKeyLoaded); // The key should be loaded only once.
+    }
+
+    public void testConcurrencyForEvictionFlow() throws Exception {
+        int diskCacheSize = randomIntBetween(450, 800);
+
+        MockCacheEventListener<String, String> eventListener = new MockCacheEventListener<>();
+
+        StoreAwareCacheBuilder<String, String> cacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>().setMaximumWeightInBytes(
+            200
+        ).setWeigher((k, v) -> 150); // Weighted so the onHeap tier can only ever hold a single entry.
+
+        StoreAwareCacheBuilder<String, String> diskCacheBuilder = new MockOnDiskCache.Builder<String, String>().setMaxSize(diskCacheSize)
+            .setDeliberateDelay(500);
+
+        TieredSpilloverCache<String, String> tieredSpilloverCache = new TieredSpilloverCache.Builder<String, String>()
+            .setOnHeapCacheBuilder(cacheBuilder)
+            .setOnDiskCacheBuilder(diskCacheBuilder)
+            .setListener(eventListener)
+            .build();
+
+        String keyToBeEvicted = "key1";
+        String secondKey = "key2";
+
+        // Put the first key on the tiered cache. It will go into the onHeap cache.
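+        // (with a per-entry weight of 150 against a maximum weight of 200, inserting a second key must evict this one)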
+        tieredSpilloverCache.computeIfAbsent(keyToBeEvicted, new LoadAwareCacheLoader<>() {
+            @Override
+            public boolean isLoaded() {
+                return false;
+            }
+
+            @Override
+            public String load(String key) {
+                return UUID.randomUUID().toString();
+            }
+        });
+        CountDownLatch countDownLatch = new CountDownLatch(1);
+        CountDownLatch countDownLatch1 = new CountDownLatch(1);
+        // Put the second key on the tiered cache. This will cause eviction of the first key from the onHeap cache,
+        // which should then go into the disk cache.
+        LoadAwareCacheLoader<String, String> loadAwareCacheLoader = getLoadAwareCacheLoader();
+        Thread thread = new Thread(() -> {
+            try {
+                tieredSpilloverCache.computeIfAbsent(secondKey, loadAwareCacheLoader);
+                countDownLatch1.countDown();
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        });
+        thread.start();
+        assertBusy(() -> { assertTrue(loadAwareCacheLoader.isLoaded()); }, 100, TimeUnit.MILLISECONDS); // We wait for the new key to be
+        // loaded, after which the eviction flow is guaranteed to occur.
+        StoreAwareCache<String, String> onDiskCache = tieredSpilloverCache.getOnDiskCache().get();
+
+        // Now, on a different thread, try to get the key (the one evicted above) from the tiered cache. We expect
+        // this to return a non-null value, as the key should be present in the disk cache.
+        AtomicReference<String> actualValue = new AtomicReference<>();
+        Thread thread1 = new Thread(() -> {
+            try {
+                actualValue.set(tieredSpilloverCache.get(keyToBeEvicted));
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            countDownLatch.countDown();
+        });
+        thread1.start();
+        countDownLatch.await();
+        assertNotNull(actualValue.get());
+        countDownLatch1.await();
+        assertEquals(1, eventListener.enumMap.get(CacheStoreType.ON_HEAP).evictionsMetric.count());
+        assertEquals(1, tieredSpilloverCache.getOnHeapCache().count());
+        assertEquals(1, onDiskCache.count());
+        assertNotNull(onDiskCache.get(keyToBeEvicted));
+    }
+
+    class MockCacheEventListener<K, V> implements StoreAwareCacheEventListener<K, V> {
+
+        EnumMap<CacheStoreType, TestStatsHolder> enumMap = new EnumMap<>(CacheStoreType.class);
+
+        MockCacheEventListener() {
+            for (CacheStoreType cacheStoreType : CacheStoreType.values()) {
+                enumMap.put(cacheStoreType, new TestStatsHolder());
+            }
+        }
+
+        @Override
+        public void onMiss(K key, CacheStoreType cacheStoreType) {
+            enumMap.get(cacheStoreType).missCount.inc();
+        }
+
+        @Override
+        public void onRemoval(StoreAwareCacheRemovalNotification<K, V> notification) {
+            if (notification.getRemovalReason().equals(RemovalReason.EVICTED)) {
+                enumMap.get(notification.getCacheStoreType()).evictionsMetric.inc();
+            } else if (notification.getRemovalReason().equals(RemovalReason.INVALIDATED)) {
+                enumMap.get(notification.getCacheStoreType()).invalidationMetric.inc();
+            }
+        }
+
+        @Override
+        public void onHit(K key, V value, CacheStoreType cacheStoreType) {
+            enumMap.get(cacheStoreType).hitCount.inc();
+        }
+
+        @Override
+        public void onCached(K key, V value, CacheStoreType cacheStoreType) {
+            enumMap.get(cacheStoreType).cachedCount.inc();
+        }
+
+        class TestStatsHolder {
+            final CounterMetric evictionsMetric = new CounterMetric();
+            final CounterMetric hitCount = new CounterMetric();
+            final CounterMetric missCount = new CounterMetric();
+            final CounterMetric cachedCount = new CounterMetric();
+            final CounterMetric invalidationMetric = new CounterMetric();
+        }
+    }
+
+    private LoadAwareCacheLoader<String, String> getLoadAwareCacheLoader() {
+        return new LoadAwareCacheLoader<String, String>() {
+            boolean isLoaded = false;
+
+            @Override
+            public String load(String key) {
+                isLoaded = true;
+                return UUID.randomUUID().toString();
+            }
+
+            @Override
+            public boolean isLoaded() {
+                return isLoaded;
+            }
+        };
+    }
+
+    private TieredSpilloverCache<String, String> initializeTieredSpilloverCache(
+        int onHeapCacheSize,
+        int diskCacheSize,
+        StoreAwareCacheEventListener<String, String> eventListener,
+        long diskDeliberateDelay
+    ) {
+        StoreAwareCacheBuilder<String, String> diskCacheBuilder = new MockOnDiskCache.Builder<String, String>().setMaxSize(diskCacheSize)
+            .setDeliberateDelay(diskDeliberateDelay);
+        StoreAwareCacheBuilder<String, String> onHeapCacheBuilder = new OpenSearchOnHeapCache.Builder<String, String>()
+            .setMaximumWeightInBytes(onHeapCacheSize * 20)
+            .setWeigher((k, v) -> 20); // Will support up to onHeapCacheSize entries.
+        return new TieredSpilloverCache.Builder<String, String>().setOnHeapCacheBuilder(onHeapCacheBuilder)
+            .setOnDiskCacheBuilder(diskCacheBuilder)
+            .setListener(eventListener)
+            .build();
+    }
+}
+
+class MockOnDiskCache<K, V> implements StoreAwareCache<K, V> {
+
+    Map<K, V> cache;
+    int maxSize;
+
+    long delay;
+    StoreAwareCacheEventListener<K, V> eventListener;
+
+    MockOnDiskCache(int maxSize, StoreAwareCacheEventListener<K, V> eventListener, long delay) {
+        this.maxSize = maxSize;
+        this.eventListener = eventListener;
+        this.delay = delay;
+        this.cache = new ConcurrentHashMap<K, V>();
+    }
+
+    @Override
+    public V get(K key) {
+        V value = cache.get(key);
+        if (value != null) {
+            eventListener.onHit(key, value, CacheStoreType.DISK);
+        } else {
+            eventListener.onMiss(key, CacheStoreType.DISK);
+        }
+        return value;
+    }
+
+    @Override
+    public void put(K key, V value) {
+        if (this.cache.size() >= maxSize) { // For simplification, a full mock cache rejects the entry as an eviction.
+            eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, value, RemovalReason.EVICTED, CacheStoreType.DISK));
+            return;
+        }
+        try {
+            Thread.sleep(delay);
+        } catch (InterruptedException e) {
+            throw new RuntimeException(e);
+        }
+        this.cache.put(key, value);
+        eventListener.onCached(key, value, CacheStoreType.DISK);
+    }
+
+    @Override
+    public V computeIfAbsent(K key, LoadAwareCacheLoader<K, V> loader) throws Exception {
+        V value = cache.computeIfAbsent(key, key1 -> {
+            try {
+                return loader.load(key);
+            } catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+        });
+        if (!loader.isLoaded()) {
+            eventListener.onHit(key, value, CacheStoreType.DISK);
+        } else {
+            eventListener.onMiss(key, CacheStoreType.DISK);
+            eventListener.onCached(key, value, CacheStoreType.DISK);
+        }
+        return value;
+    }
+
+    @Override
+    public void invalidate(K key) {
+        if (this.cache.containsKey(key)) {
+            eventListener.onRemoval(new StoreAwareCacheRemovalNotification<>(key, null, RemovalReason.INVALIDATED, CacheStoreType.DISK));
+        }
+        this.cache.remove(key);
+    }
+
+    @Override
+    public void invalidateAll() {
+        this.cache.clear();
+    }
+
+    @Override
+    public Iterable<K> keys() {
+        return this.cache.keySet();
+    }
+
+    @Override
+    public long count() {
+        return this.cache.size();
+    }
+
+    @Override
+    public void refresh() {}
+
+    @Override
+    public CacheStoreType getTierType() {
+        return CacheStoreType.DISK;
+    }
+
+    public static class Builder<K, V> extends StoreAwareCacheBuilder<K, V> {
+
+        int maxSize;
+        long delay;
+
+        @Override
+        public StoreAwareCache<K, V> build() {
+            return new MockOnDiskCache<K, V>(maxSize, this.getEventListener(), delay);
+        }
+
+        public Builder<K, V> setMaxSize(int maxSize) {
+            this.maxSize = maxSize;
+            return this;
+        }
+
+        public Builder<K, V> setDeliberateDelay(long millis)
{ + this.delay = millis; + return this; + } + } +} From a9ce180b57dc20408e5a36337dbdc85d6e1fd306 Mon Sep 17 00:00:00 2001 From: Suraj Singh <surajrider@gmail.com> Date: Tue, 9 Jan 2024 11:27:06 -0800 Subject: [PATCH 071/178] Unmute testNoFailuresOnFileReads flaky test (#11824) Signed-off-by: Suraj Singh <surajrider@gmail.com> --- .../java/org/opensearch/index/shard/RemoteIndexShardTests.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java index eacc504428ef1..57a561bc8f2a3 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteIndexShardTests.java @@ -465,7 +465,6 @@ public void onReplicationFailure( * blocking update of reader. Once this is done, it corrupts one segment file and ensure that file is deleted in next * round of segment replication by ensuring doc count. */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/10885") public void testNoFailuresOnFileReads() throws Exception { try (ReplicationGroup shards = createGroup(1, getIndexSettings(), new NRTReplicationEngineFactory())) { shards.startAll(); From 4a42150ba0cbf10022797c038f35faaea2f548f6 Mon Sep 17 00:00:00 2001 From: Marc Handalian <handalm@amazon.com> Date: Tue, 9 Jan 2024 18:17:47 -0800 Subject: [PATCH 072/178] Bump to Lucene99 (#11421) Upgraded Lucene dependency to 9.9.1. --------- Signed-off-by: Marc Handalian <marc.handalian@gmail.com> Signed-off-by: Marc Handalian <handalm@amazon.com> --- CHANGELOG.md | 1 + buildSrc/version.properties | 2 +- libs/core/licenses/lucene-core-9.8.0.jar.sha1 | 1 - libs/core/licenses/lucene-core-9.9.1.jar.sha1 | 1 + .../src/main/java/org/opensearch/Version.java | 2 +- .../lucene-expressions-9.8.0.jar.sha1 | 1 - .../lucene-expressions-9.9.1.jar.sha1 | 1 + .../mapper/ScaledFloatFieldTypeTests.java | 31 ++++++++------- .../Netty4HttpPipeliningHandlerTests.java | 4 +- .../lucene-analysis-icu-9.8.0.jar.sha1 | 1 - .../lucene-analysis-icu-9.9.1.jar.sha1 | 1 + .../lucene-analysis-kuromoji-9.8.0.jar.sha1 | 1 - .../lucene-analysis-kuromoji-9.9.1.jar.sha1 | 1 + .../analysis/KuromojiTokenizerFactory.java | 2 +- .../lucene-analysis-nori-9.8.0.jar.sha1 | 1 - .../lucene-analysis-nori-9.9.1.jar.sha1 | 1 + .../lucene-analysis-phonetic-9.8.0.jar.sha1 | 1 - .../lucene-analysis-phonetic-9.9.1.jar.sha1 | 1 + .../lucene-analysis-smartcn-9.8.0.jar.sha1 | 1 - .../lucene-analysis-smartcn-9.9.1.jar.sha1 | 1 + .../lucene-analysis-stempel-9.8.0.jar.sha1 | 1 - .../lucene-analysis-stempel-9.9.1.jar.sha1 | 1 + .../lucene-analysis-morfologik-9.8.0.jar.sha1 | 1 - .../lucene-analysis-morfologik-9.9.1.jar.sha1 | 1 + .../index/codec/CorrelationCodecVersion.java | 12 +++--- .../CorrelationCodec.java | 4 +- .../PerFieldCorrelationVectorsFormat.java | 12 +++--- .../package-info.java | 2 +- .../services/org.apache.lucene.codecs.Codec | 2 +- .../CorrelationCodecTests.java | 6 +-- .../nio/NioHttpPipeliningHandlerTests.java | 4 +- .../lucene-analysis-common-9.8.0.jar.sha1 | 1 - .../lucene-analysis-common-9.9.1.jar.sha1 | 1 + .../lucene-backward-codecs-9.8.0.jar.sha1 | 1 - .../lucene-backward-codecs-9.9.1.jar.sha1 | 1 + server/licenses/lucene-core-9.8.0.jar.sha1 | 1 - server/licenses/lucene-core-9.9.1.jar.sha1 | 1 + .../licenses/lucene-grouping-9.8.0.jar.sha1 | 1 - .../licenses/lucene-grouping-9.9.1.jar.sha1 | 1 + .../lucene-highlighter-9.8.0.jar.sha1 | 1 - 
.../lucene-highlighter-9.9.1.jar.sha1 | 1 + server/licenses/lucene-join-9.8.0.jar.sha1 | 1 - server/licenses/lucene-join-9.9.1.jar.sha1 | 1 + server/licenses/lucene-memory-9.8.0.jar.sha1 | 1 - server/licenses/lucene-memory-9.9.1.jar.sha1 | 1 + server/licenses/lucene-misc-9.8.0.jar.sha1 | 1 - server/licenses/lucene-misc-9.9.1.jar.sha1 | 1 + server/licenses/lucene-queries-9.8.0.jar.sha1 | 1 - server/licenses/lucene-queries-9.9.1.jar.sha1 | 1 + .../lucene-queryparser-9.8.0.jar.sha1 | 1 - .../lucene-queryparser-9.9.1.jar.sha1 | 1 + server/licenses/lucene-sandbox-9.8.0.jar.sha1 | 1 - server/licenses/lucene-sandbox-9.9.1.jar.sha1 | 1 + .../lucene-spatial-extras-9.8.0.jar.sha1 | 1 - .../lucene-spatial-extras-9.9.1.jar.sha1 | 1 + .../licenses/lucene-spatial3d-9.8.0.jar.sha1 | 1 - .../licenses/lucene-spatial3d-9.9.1.jar.sha1 | 1 + server/licenses/lucene-suggest-9.8.0.jar.sha1 | 1 - server/licenses/lucene-suggest-9.9.1.jar.sha1 | 1 + .../index/store/CorruptedFileIT.java | 12 ++++++ .../validate/SimpleValidateQueryIT.java | 5 ++- .../lucene/queries/BlendedTermQuery.java | 3 +- .../search/grouping/CollapseTopFieldDocs.java | 3 +- .../search/BottomSortValuesCollector.java | 3 +- .../org/opensearch/common/lucene/Lucene.java | 2 +- .../org/opensearch/index/IndexModule.java | 4 +- .../opensearch/index/codec/CodecService.java | 12 +++--- .../PerFieldMappingPostingFormatCodec.java | 4 +- .../index/fielddata/IndexFieldData.java | 8 ++++ .../BytesRefFieldComparatorSource.java | 5 ++- .../DoubleValuesComparatorSource.java | 5 ++- .../FloatValuesComparatorSource.java | 5 ++- .../HalfFloatValuesComparatorSource.java | 5 ++- .../IntValuesComparatorSource.java | 5 ++- .../LongValuesComparatorSource.java | 5 ++- .../UnsignedLongValuesComparatorSource.java | 5 ++- .../index/mapper/CompletionFieldMapper.java | 4 +- .../comparators/HalfFloatComparator.java | 5 ++- .../comparators/UnsignedLongComparator.java | 5 ++- .../queries/SearchAfterSortedDocQuery.java | 3 +- .../bucket/BestBucketsDeferringCollector.java | 5 ++- .../bucket/composite/CompositeAggregator.java | 5 ++- .../search/internal/ContextIndexSearcher.java | 1 + .../search/sort/GeoDistanceSortBuilder.java | 5 ++- .../sort/SortedWiderNumericSortField.java | 7 ++-- .../BottomSortValuesCollectorTests.java | 3 +- .../opensearch/index/codec/CodecTests.java | 20 +++++----- .../engine/CompletionStatsCacheTests.java | 8 ++-- .../index/mapper/RangeFieldTypeTests.java | 4 +- .../query/GeoDistanceQueryBuilderTests.java | 5 ++- .../AggregationProcessorTests.java | 38 ++++++++++++++++--- .../metrics/InternalTopHitsTests.java | 3 +- .../internal/ContextIndexSearcherTests.java | 4 +- .../search/query/QueryPhaseTests.java | 3 +- .../search/query/QueryProfilePhaseTests.java | 3 +- .../searchafter/SearchAfterBuilderTests.java | 3 +- 96 files changed, 219 insertions(+), 136 deletions(-) delete mode 100644 libs/core/licenses/lucene-core-9.8.0.jar.sha1 create mode 100644 libs/core/licenses/lucene-core-9.9.1.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 delete mode 100644 
plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 rename plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/{correlation950 => correlation990}/CorrelationCodec.java (97%) rename plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/{correlation950 => correlation990}/PerFieldCorrelationVectorsFormat.java (77%) rename plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/{correlation950 => correlation990}/package-info.java (96%) rename plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/{correlation950 => correlation990}/CorrelationCodecTests.java (98%) delete mode 100644 server/licenses/lucene-analysis-common-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-analysis-common-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-core-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-core-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-grouping-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-join-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-join-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-memory-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-memory-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-misc-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-misc-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-queries-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-queries-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-9.9.1.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-9.8.0.jar.sha1 create mode 100644 server/licenses/lucene-suggest-9.9.1.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index d23a439c35d08..8e0317c6d5475 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -164,6 +164,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) - Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) - Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.0 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795)) +- Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 58b54e7b77a1e..eceb08c05953d 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ opensearch = 3.0.0 -lucene = 9.8.0 +lucene = 9.9.1 bundled_jdk_vendor = adoptium bundled_jdk = 21.0.1+12 diff --git a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 b/libs/core/licenses/lucene-core-9.8.0.jar.sha1 deleted file mode 100644 index f9a3e2f3cbee6..0000000000000 --- a/libs/core/licenses/lucene-core-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e8421c5f8573bcf22e9265fc7e19469545a775a \ No newline at end of file diff --git a/libs/core/licenses/lucene-core-9.9.1.jar.sha1 b/libs/core/licenses/lucene-core-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..ae596196d9e6a --- /dev/null +++ b/libs/core/licenses/lucene-core-9.9.1.jar.sha1 @@ -0,0 +1 @@ +55249fa9a0ed321adcf8283c6f3b649a6812b0a9 \ No newline at end of file diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index d94be3f25b53d..b2a8e619c9720 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -99,7 +99,7 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_8_0); - public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_8_0); + public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_1); public static final Version CURRENT = V_3_0_0; public static Version fromId(int id) { diff --git a/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1 deleted file mode 100644 index 892865a017f48..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7725476acfcb9bdfeff1b813ce15c39c6b857dc2 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..402cc36ba3d68 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 @@ -0,0 +1 @@ +1782a69d0e83af9cc3c65db0dcd2e7e7c1e5f90e \ No newline at end of file diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java 
b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java index d83811e6668eb..a653edbd05992 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -135,35 +135,40 @@ public void testRangeQuery() throws IOException { public void testRoundsUpperBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(null, 0.1, true, false, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.1, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.095, true, false, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.095, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 9]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 9]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.105, true, false, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 0.105, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 10]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 10]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(null, 79.99, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9223372036854775808 TO 7999]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9223372036854775808 TO 7999]", getQueryString(scaledFloatQ)); } public void testRoundsLowerBoundCorrectly() { ScaledFloatFieldMapper.ScaledFloatFieldType ft = new ScaledFloatFieldMapper.ScaledFloatFieldType("scaled_float", 100); Query scaledFloatQ = ft.rangeQuery(-0.1, null, false, true, MOCK_QSC); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.1, null, true, true, MOCK_QSC); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.095, null, false, true, MOCK_QSC); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.095, null, true, true, MOCK_QSC); - assertEquals("scaled_float:[-9 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-9 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.105, null, false, true, MOCK_QSC); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", 
scaledFloatQ.toString()); + assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ)); scaledFloatQ = ft.rangeQuery(-0.105, null, true, true, MOCK_QSC); - assertEquals("scaled_float:[-10 TO 9223372036854775807]", scaledFloatQ.toString()); + assertEquals("scaled_float:[-10 TO 9223372036854775807]", getQueryString(scaledFloatQ)); + } + + private String getQueryString(Query query) { + assertTrue(query instanceof IndexOrDocValuesQuery); + return ((IndexOrDocValuesQuery) query).getIndexQuery().toString(); } public void testValueForSearch() { diff --git a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java index 743b51e979fb7..99d576bed01c7 100644 --- a/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/http/netty4/Netty4HttpPipeliningHandlerTests.java @@ -79,7 +79,7 @@ public class Netty4HttpPipeliningHandlerTests extends OpenSearchTestCase { @After public void tearDown() throws Exception { waitingRequests.keySet().forEach(this::finishRequest); - shutdownExecutorService(); + shutdownExecutorServices(); super.tearDown(); } @@ -88,7 +88,7 @@ private CountDownLatch finishRequest(String url) { return finishingRequests.get(url); } - private void shutdownExecutorService() throws InterruptedException { + private void shutdownExecutorServices() throws InterruptedException { if (!handlerService.isShutdown()) { handlerService.shutdown(); handlerService.awaitTermination(10, TimeUnit.SECONDS); diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 deleted file mode 100644 index ef410899981ca..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7133d34e92770f59eb28686f4d511b9f3f32e970 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..dde9b7c100dc7 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 @@ -0,0 +1 @@ +147cb42a90a29501d9ca6094ea0db1d213f3076a \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 deleted file mode 100644 index 46b83c9e40b3a..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -be44282e1f6b91a0650fcceb558053d6bdd4863d \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..b70a22e9db096 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 @@ -0,0 +1 @@ +b034dd3a975763e083c7e11b5d0f7d516ab72590 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java index 2939711f6f7e1..76b109932e642 100644 --- 
a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java @@ -38,7 +38,7 @@ import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; import org.apache.lucene.analysis.ja.dict.UserDictionary; -import org.apache.lucene.analysis.ja.util.CSVUtil; +import org.apache.lucene.analysis.util.CSVUtil; import org.opensearch.OpenSearchException; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 deleted file mode 100644 index 36664695a7818..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bd1f80d33346f7e588685484ef29a304db5190e4 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..323f165c62790 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 @@ -0,0 +1 @@ +c405f2f7d0fc127d88dfbadd753469b2028fdf52 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 deleted file mode 100644 index 003ccdf8b0727..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b9ffdc7a52d2087ecb03318ec06305b480cdfe82 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..dd659ddf4de95 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 @@ -0,0 +1 @@ +970e5775876c2d7e1b9af7421a4b17d96f63faf4 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 deleted file mode 100644 index e22eaa474016f..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f73e2007b133fb699e517ef13b4952844f0150d8 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..ed0e81d8f1f75 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 @@ -0,0 +1 @@ +2421e5238e9b8484929291744d709dd743c01da1 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 deleted file mode 100644 index 1ebe42a2a2f56..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2c09cbc021a8f81a01600a1d2a999361e70f7aed \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 new file 
mode 100644 index 0000000000000..fd8e000088180 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 @@ -0,0 +1 @@ +a23e7de4cd9ae7af285c89dc1c55e0ac3f157fd3 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 deleted file mode 100644 index 3c4523d45c0f5..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b054f2c7b11fc7c5601b4c3cdf18aa7508612898 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..d0e7a3b0c751c --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 @@ -0,0 +1 @@ +8d9bce1ea51db279878c51091dd9aefc7b335da4 \ No newline at end of file diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java index 5e2cb8bfbc03a..3fcc995fb4199 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/CorrelationCodecVersion.java @@ -9,10 +9,10 @@ package org.opensearch.plugin.correlation.core.index.codec; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.index.mapper.MapperService; -import org.opensearch.plugin.correlation.core.index.codec.correlation950.CorrelationCodec; -import org.opensearch.plugin.correlation.core.index.codec.correlation950.PerFieldCorrelationVectorsFormat; +import org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec; +import org.opensearch.plugin.correlation.core.index.codec.correlation990.PerFieldCorrelationVectorsFormat; import java.util.Optional; import java.util.function.BiFunction; @@ -24,15 +24,15 @@ * @opensearch.internal */ public enum CorrelationCodecVersion { - V_9_5_0( + V_9_9_0( "CorrelationCodec", - new Lucene95Codec(), + new Lucene99Codec(), new PerFieldCorrelationVectorsFormat(Optional.empty()), (userCodec, mapperService) -> new CorrelationCodec(userCodec, new PerFieldCorrelationVectorsFormat(Optional.of(mapperService))), CorrelationCodec::new ); - private static final CorrelationCodecVersion CURRENT = V_9_5_0; + private static final CorrelationCodecVersion CURRENT = V_9_9_0; private final String codecName; private final Codec defaultCodecDelegate; private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java similarity index 97% rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java rename to 
plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java index f91ba429fbea9..022972e2e06c3 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodec.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodec.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; @@ -19,7 +19,7 @@ * @opensearch.internal */ public class CorrelationCodec extends FilterCodec { - private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_5_0; + private static final CorrelationCodecVersion VERSION = CorrelationCodecVersion.V_9_9_0; private final PerFieldCorrelationVectorsFormat perFieldCorrelationVectorsFormat; /** diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java similarity index 77% rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java index f6862ecc17736..89cc0b614a1a5 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/PerFieldCorrelationVectorsFormat.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/PerFieldCorrelationVectorsFormat.java @@ -6,9 +6,9 @@ * compatible open source license. 
*/ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; -import org.apache.lucene.codecs.lucene95.Lucene95HnswVectorsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.opensearch.index.mapper.MapperService; import org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat; @@ -26,10 +26,10 @@ public class PerFieldCorrelationVectorsFormat extends BasePerFieldCorrelationVec public PerFieldCorrelationVectorsFormat(final Optional<MapperService> mapperService) { super( mapperService, - Lucene95HnswVectorsFormat.DEFAULT_MAX_CONN, - Lucene95HnswVectorsFormat.DEFAULT_BEAM_WIDTH, - Lucene95HnswVectorsFormat::new, - Lucene95HnswVectorsFormat::new + Lucene99HnswVectorsFormat.DEFAULT_MAX_CONN, + Lucene99HnswVectorsFormat.DEFAULT_BEAM_WIDTH, + Lucene99HnswVectorsFormat::new, + Lucene99HnswVectorsFormat::new ); } } diff --git a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java similarity index 96% rename from plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java rename to plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java index b4dad34d2718e..fc2a9de58a73a 100644 --- a/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/package-info.java +++ b/plugins/events-correlation-engine/src/main/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/package-info.java @@ -9,4 +9,4 @@ /** * custom Lucene9.5 codec package for events-correlation-engine */ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; diff --git a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 598a3b6af73c2..013c17e4a9736 100644 --- a/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/plugins/events-correlation-engine/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1 +1 @@ -org.opensearch.plugin.correlation.core.index.codec.correlation950.CorrelationCodec +org.opensearch.plugin.correlation.core.index.codec.correlation990.CorrelationCodec diff --git a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java similarity index 98% rename from plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java rename to plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java index b93172537d419..7223b450a136c 100644 --- 
a/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation950/CorrelationCodecTests.java +++ b/plugins/events-correlation-engine/src/test/java/org/opensearch/plugin/correlation/core/index/codec/correlation990/CorrelationCodecTests.java @@ -6,7 +6,7 @@ * compatible open source license. */ -package org.opensearch.plugin.correlation.core.index.codec.correlation950; +package org.opensearch.plugin.correlation.core.index.codec.correlation990; import org.apache.lucene.codecs.Codec; import org.apache.lucene.document.Document; @@ -32,7 +32,7 @@ import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_EF_CONSTRUCTION; import static org.opensearch.plugin.correlation.core.index.codec.BasePerFieldCorrelationVectorsFormat.METHOD_PARAMETER_M; -import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_5_0; +import static org.opensearch.plugin.correlation.core.index.codec.CorrelationCodecVersion.V_9_9_0; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -56,7 +56,7 @@ public void testCorrelationVectorIndex() throws Exception { Function<MapperService, PerFieldCorrelationVectorsFormat> perFieldCorrelationVectorsProvider = mapperService -> new PerFieldCorrelationVectorsFormat(Optional.of(mapperService)); Function<PerFieldCorrelationVectorsFormat, Codec> correlationCodecProvider = (correlationVectorsFormat -> new CorrelationCodec( - V_9_5_0.getDefaultCodecDelegate(), + V_9_9_0.getDefaultCodecDelegate(), correlationVectorsFormat )); testCorrelationVectorIndex(correlationCodecProvider, perFieldCorrelationVectorsProvider); diff --git a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java index 46cf6ae708d1c..d0c0406bd7774 100644 --- a/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java +++ b/plugins/transport-nio/src/test/java/org/opensearch/http/nio/NioHttpPipeliningHandlerTests.java @@ -80,7 +80,7 @@ public class NioHttpPipeliningHandlerTests extends OpenSearchTestCase { @After public void cleanup() throws Exception { waitingRequests.keySet().forEach(this::finishRequest); - shutdownExecutorService(); + shutdownExecutorServices(); } private CountDownLatch finishRequest(String url) { @@ -88,7 +88,7 @@ private CountDownLatch finishRequest(String url) { return finishingRequests.get(url); } - private void shutdownExecutorService() throws InterruptedException { + private void shutdownExecutorServices() throws InterruptedException { if (!handlerService.isShutdown()) { handlerService.shutdown(); handlerService.awaitTermination(10, TimeUnit.SECONDS); diff --git a/server/licenses/lucene-analysis-common-9.8.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.8.0.jar.sha1 deleted file mode 100644 index 6ad304fa52c12..0000000000000 --- a/server/licenses/lucene-analysis-common-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -36f0363325ca7bf62c180160d1ed5165c7c37795 \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..c9e6120da7497 --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 @@ -0,0 +1 @@ +24c8401b530308f9568eb7b408c2029c63f564c6 \ No newline at end of file 
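The jar checksum churn above and below follows the repository convention that every bundled jar ships with a one-line licenses/*.jar.sha1 file. A minimal sketch (not the project's actual Gradle dependency-license check) of computing the digest such a file is expected to contain:

import java.nio.file.Files;
import java.nio.file.Path;
import java.security.MessageDigest;

public final class Sha1OfJar {
    public static void main(String[] args) throws Exception {
        byte[] jar = Files.readAllBytes(Path.of(args[0]));
        byte[] digest = MessageDigest.getInstance("SHA-1").digest(jar);
        StringBuilder hex = new StringBuilder();
        for (byte b : digest) {
            hex.append(String.format("%02x", b)); // lower-case hex, as in the .sha1 files
        }
        System.out.println(hex);
    }
}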
diff --git a/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 deleted file mode 100644 index f104c4207d390..0000000000000 --- a/server/licenses/lucene-backward-codecs-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e98fb408028f40170e6d87c16422bfdc0bb2e392 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..69ecf6aa68200 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 @@ -0,0 +1 @@ +11c46007366bb037be7d271ab0a5849b1d544662 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.8.0.jar.sha1 b/server/licenses/lucene-core-9.8.0.jar.sha1 deleted file mode 100644 index f9a3e2f3cbee6..0000000000000 --- a/server/licenses/lucene-core-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5e8421c5f8573bcf22e9265fc7e19469545a775a \ No newline at end of file diff --git a/server/licenses/lucene-core-9.9.1.jar.sha1 b/server/licenses/lucene-core-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..ae596196d9e6a --- /dev/null +++ b/server/licenses/lucene-core-9.9.1.jar.sha1 @@ -0,0 +1 @@ +55249fa9a0ed321adcf8283c6f3b649a6812b0a9 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.8.0.jar.sha1 b/server/licenses/lucene-grouping-9.8.0.jar.sha1 deleted file mode 100644 index ab132121b2edc..0000000000000 --- a/server/licenses/lucene-grouping-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d39184518351178c404ed9669fc6cb6111f2288d \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.9.1.jar.sha1 b/server/licenses/lucene-grouping-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..e7df056400661 --- /dev/null +++ b/server/licenses/lucene-grouping-9.9.1.jar.sha1 @@ -0,0 +1 @@ +2f2785e17c5c823cc8f41a7ddb4647aaca8ee773 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.8.0.jar.sha1 b/server/licenses/lucene-highlighter-9.8.0.jar.sha1 deleted file mode 100644 index c7cb678fb7b72..0000000000000 --- a/server/licenses/lucene-highlighter-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1ac38c8278dbd63dfab30744a41dd955a415a31c \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.9.1.jar.sha1 b/server/licenses/lucene-highlighter-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..828c7294aa586 --- /dev/null +++ b/server/licenses/lucene-highlighter-9.9.1.jar.sha1 @@ -0,0 +1 @@ +30928513461bf79a5cb057e84da7d34a1e53227d \ No newline at end of file diff --git a/server/licenses/lucene-join-9.8.0.jar.sha1 b/server/licenses/lucene-join-9.8.0.jar.sha1 deleted file mode 100644 index 2b6cb8af4faf6..0000000000000 --- a/server/licenses/lucene-join-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3d64fc57bb6e718d906413a9f73c713e6d4d8bb0 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.9.1.jar.sha1 b/server/licenses/lucene-join-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..34b44ca8c6ad5 --- /dev/null +++ b/server/licenses/lucene-join-9.9.1.jar.sha1 @@ -0,0 +1 @@ +b9c8cc99632280148f92b4c0a64111c482d5d0ac \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.8.0.jar.sha1 b/server/licenses/lucene-memory-9.8.0.jar.sha1 deleted file mode 100644 index 5fdfee401dd0a..0000000000000 --- a/server/licenses/lucene-memory-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5283ac71d6ccecb5e00c7b52df2faec012f2625a \ No newline at end of file diff --git 
a/server/licenses/lucene-memory-9.9.1.jar.sha1 b/server/licenses/lucene-memory-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..b75fba4c331e9 --- /dev/null +++ b/server/licenses/lucene-memory-9.9.1.jar.sha1 @@ -0,0 +1 @@ +49f820b1b321860fa42a4f7583e8ed8f77b9c1c2 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.8.0.jar.sha1 b/server/licenses/lucene-misc-9.8.0.jar.sha1 deleted file mode 100644 index cf815cba15862..0000000000000 --- a/server/licenses/lucene-misc-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9a57b049cf51a5e9c9c1909c420f645f1b6f9a54 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.9.1.jar.sha1 b/server/licenses/lucene-misc-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..f1e1e056004e9 --- /dev/null +++ b/server/licenses/lucene-misc-9.9.1.jar.sha1 @@ -0,0 +1 @@ +db7c30217602dfcda394a4d0f0a9e68140d385a6 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.8.0.jar.sha1 b/server/licenses/lucene-queries-9.8.0.jar.sha1 deleted file mode 100644 index 09f369ef18e12..0000000000000 --- a/server/licenses/lucene-queries-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -628db4ef46f1c6a05145bdac1d1bc4ace6341b13 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.9.1.jar.sha1 b/server/licenses/lucene-queries-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..888b9b4a05ec8 --- /dev/null +++ b/server/licenses/lucene-queries-9.9.1.jar.sha1 @@ -0,0 +1 @@ +d157547bd24edc8e9d9d59c273107dc3ac5fde5e \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.8.0.jar.sha1 b/server/licenses/lucene-queryparser-9.8.0.jar.sha1 deleted file mode 100644 index 2a42a8956b18b..0000000000000 --- a/server/licenses/lucene-queryparser-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -982faf2bfa55542bf57fbadef54c19ac00f57cae \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.9.1.jar.sha1 b/server/licenses/lucene-queryparser-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..1ce8a069a0f4e --- /dev/null +++ b/server/licenses/lucene-queryparser-9.9.1.jar.sha1 @@ -0,0 +1 @@ +12d844fe224f6f97c510ac20d68903ed7f626f6c \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.8.0.jar.sha1 b/server/licenses/lucene-sandbox-9.8.0.jar.sha1 deleted file mode 100644 index 64a0b07f72d29..0000000000000 --- a/server/licenses/lucene-sandbox-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -06493dbd14d02537716822254866a94458f4d842 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.9.1.jar.sha1 b/server/licenses/lucene-sandbox-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..14fd86dadc404 --- /dev/null +++ b/server/licenses/lucene-sandbox-9.9.1.jar.sha1 @@ -0,0 +1 @@ +272e588fd3d8c0a401b28a1ac715f27044bf62ec \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 deleted file mode 100644 index d1bcb0581435c..0000000000000 --- a/server/licenses/lucene-spatial-extras-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9d9a731822ad6eefa1ba288a0c158d478522f165 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..0efd5a7595bfe --- /dev/null +++ b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 @@ -0,0 +1 @@ +e066432e7ab02b2a4914f989bcd8c44adbf340ad \ No newline at end of file diff --git 
a/server/licenses/lucene-spatial3d-9.8.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.8.0.jar.sha1 deleted file mode 100644 index d17459cc569a9..0000000000000 --- a/server/licenses/lucene-spatial3d-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ce752a52b2d4eac90633c7df7982e29504f99e76 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..7f06466e4c721 --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 @@ -0,0 +1 @@ +fa54c9b962778e28ebc0efb9f75297781350361a \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.8.0.jar.sha1 b/server/licenses/lucene-suggest-9.8.0.jar.sha1 deleted file mode 100644 index ff47b87672d2c..0000000000000 --- a/server/licenses/lucene-suggest-9.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f977f96f2093b7fddea6b67caa2e1c5b10edebf6 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.9.1.jar.sha1 b/server/licenses/lucene-suggest-9.9.1.jar.sha1 new file mode 100644 index 0000000000000..06732480d1b6c --- /dev/null +++ b/server/licenses/lucene-suggest-9.9.1.jar.sha1 @@ -0,0 +1 @@ +9554de5b22ae7483b344b94a9a956960b7a5d49c \ No newline at end of file diff --git a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java index 8291fef5d177b..f46f413f4d23f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/store/CorruptedFileIT.java @@ -710,6 +710,7 @@ public void testPrimaryCorruptionDuringReplicationDoesNotFailReplicaShard() thro final NodeStats primaryNode = dataNodeStats.get(0); final NodeStats replicaNode = dataNodeStats.get(1); + assertAcked( prepareCreate("test").setSettings( Settings.builder() @@ -795,6 +796,17 @@ public void testPrimaryCorruptionDuringReplicationDoesNotFailReplicaShard() thro // Assert the cluster returns to green status because the replica will be promoted to primary ensureGreen(); + + // After Lucene 9.9 check index will flag corruption with old (not the latest) commit points. + // For this test our previous corrupt commit "segments_2" will remain on the primary. + // This asserts this is the case, and then resets check index status. + assertEquals("Check index has a single failure", 1, checkIndexFailures.size()); + assertTrue( + checkIndexFailures.get(0) + .getMessage() + .contains("could not read old (not latest) commit point segments file \"segments_2\" in directory") + ); + resetCheckIndexStatus(); } private int numShards(String... 
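As the comment added to CorruptedFileIT above notes, Lucene 9.9's index checker also reads commit points older than the latest one, so the corrupt segments_2 left behind on the primary now surfaces as a check-index failure ("could not read old (not latest) commit point segments file"). A hedged sketch of invoking that check directly; the path and reporting are illustrative:

import java.nio.file.Path;

import org.apache.lucene.index.CheckIndex;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;

public final class CheckIndexExample {
    public static void main(String[] args) throws Exception {
        try (Directory dir = FSDirectory.open(Path.of(args[0])); CheckIndex check = new CheckIndex(dir)) {
            // Since 9.9 this can also fail on corrupt older commit points,
            // not just the most recent segments_N.
            CheckIndex.Status status = check.checkIndex();
            System.out.println(status.clean ? "index is clean" : "corruption detected");
        }
    }
}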
index) { diff --git a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java index e619b93cdd93b..4ac2e1669ca67 100644 --- a/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/validate/SimpleValidateQueryIT.java @@ -270,7 +270,10 @@ public void testExplainDateRangeInQueryString() { long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000; long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1; - assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]")); + assertThat( + response.getQueryExplanation().get(0).getExplanation(), + containsString("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]") + ); assertThat(response.isValid(), equalTo(true)); } diff --git a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 345be330f048c..b47b974b96fed 100644 --- a/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -100,11 +100,10 @@ public Query rewrite(IndexSearcher searcher) throws IOException { return rewritten; } IndexReader reader = searcher.getIndexReader(); - IndexReaderContext context = reader.getContext(); TermStates[] ctx = new TermStates[terms.length]; int[] docFreqs = new int[ctx.length]; for (int i = 0; i < terms.length; i++) { - ctx[i] = TermStates.build(context, terms[i], true); + ctx[i] = TermStates.build(searcher, terms[i], true); docFreqs[i] = ctx[i].docFreq(); } diff --git a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java index e93e5cdcc3f7b..961587113173d 100644 --- a/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java +++ b/server/src/main/java/org/apache/lucene/search/grouping/CollapseTopFieldDocs.java @@ -33,6 +33,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -144,7 +145,7 @@ private static class MergeSortQueue extends PriorityQueue<ShardRef> { reverseMul = new int[sortFields.length]; for (int compIDX = 0; compIDX < sortFields.length; compIDX++) { final SortField sortField = sortFields[compIDX]; - comparators[compIDX] = sortField.getComparator(1, false); + comparators[compIDX] = sortField.getComparator(1, Pruning.NONE); reverseMul[compIDX] = sortField.getReverse() ? 
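The BlendedTermQuery hunk above tracks a Lucene 9.9 API change: TermStates.build now takes the IndexSearcher rather than an IndexReaderContext, so the stats lookup can go through the searcher (and, where configured, its task executor). A minimal sketch of the new call; the term is a placeholder:

import java.io.IOException;

import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermStates;
import org.apache.lucene.search.IndexSearcher;

final class TermStatsExample {
    static int docFreq(IndexSearcher searcher) throws IOException {
        // Previously: TermStates.build(searcher.getIndexReader().getContext(), term, true)
        TermStates states = TermStates.build(searcher, new Term("field", "value"), true);
        return states.docFreq();
    }
}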
-1 : 1; } } diff --git a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java index c831c80b6455c..bce8d9fb2b1ca 100644 --- a/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/opensearch/action/search/BottomSortValuesCollector.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.opensearch.search.DocValueFormat; @@ -59,7 +60,7 @@ class BottomSortValuesCollector { this.reverseMuls = new int[sortFields.length]; this.sortFields = sortFields; for (int i = 0; i < sortFields.length; i++) { - comparators[i] = sortFields[i].getComparator(1, false); + comparators[i] = sortFields[i].getComparator(1, Pruning.NONE); reverseMuls[i] = sortFields[i].getReverse() ? -1 : 1; } } diff --git a/server/src/main/java/org/opensearch/common/lucene/Lucene.java b/server/src/main/java/org/opensearch/common/lucene/Lucene.java index 5b521081ac63b..2c7b6b552b43f 100644 --- a/server/src/main/java/org/opensearch/common/lucene/Lucene.java +++ b/server/src/main/java/org/opensearch/common/lucene/Lucene.java @@ -110,7 +110,7 @@ * @opensearch.internal */ public class Lucene { - public static final String LATEST_CODEC = "Lucene95"; + public static final String LATEST_CODEC = "Lucene99"; public static final String SOFT_DELETES_FIELD = "__soft_deletes"; diff --git a/server/src/main/java/org/opensearch/index/IndexModule.java b/server/src/main/java/org/opensearch/index/IndexModule.java index 545623287bae8..6ac10a221d49e 100644 --- a/server/src/main/java/org/opensearch/index/IndexModule.java +++ b/server/src/main/java/org/opensearch/index/IndexModule.java @@ -161,7 +161,7 @@ public final class IndexModule { /** Which lucene file extensions to load with the mmap directory when using hybridfs store. This settings is ignored if {@link #INDEX_STORE_HYBRID_NIO_EXTENSIONS} is set. * This is an expert setting. - * @see <a href="https://lucene.apache.org/core/9_5_0/core/org/apache/lucene/codecs/lucene95/package-summary.html#file-names">Lucene File Extensions</a>. + * @see <a href="https://lucene.apache.org/core/9_9_0/core/org/apache/lucene/codecs/lucene99/package-summary.html#file-names">Lucene File Extensions</a>. * * @deprecated This setting will be removed in OpenSearch 3.x. Use {@link #INDEX_STORE_HYBRID_NIO_EXTENSIONS} instead. */ @@ -206,7 +206,7 @@ public Iterator<Setting<?>> settings() { /** Which lucene file extensions to load with nio. All others will default to mmap. Takes precedence over {@link #INDEX_STORE_HYBRID_MMAP_EXTENSIONS}. * This is an expert setting. - * @see <a href="https://lucene.apache.org/core/9_5_0/core/org/apache/lucene/codecs/lucene95/package-summary.html#file-names">Lucene File Extensions</a>. + * @see <a href="https://lucene.apache.org/core/9_9_0/core/org/apache/lucene/codecs/lucene99/package-summary.html#file-names">Lucene File Extensions</a>. 
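With LATEST_CODEC bumped to "Lucene99" above, every place that constructed a Lucene95Codec moves to Lucene99Codec (see CodecService below); the Mode values for stored-fields compression carry over unchanged. A short sketch of wiring the best-compression flavor into an IndexWriterConfig, assuming StandardAnalyzer suffices:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene99.Lucene99Codec;
import org.apache.lucene.index.IndexWriterConfig;

final class CodecExample {
    static IndexWriterConfig bestCompressionConfig() {
        // Corresponds to CodecService's "zlib"/"best_compression" entries after this patch.
        Codec codec = new Lucene99Codec(Lucene99Codec.Mode.BEST_COMPRESSION);
        return new IndexWriterConfig(new StandardAnalyzer()).setCodec(codec);
    }
}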
*/ public static final Setting<List<String>> INDEX_STORE_HYBRID_NIO_EXTENSIONS = Setting.listSetting( "index.store.hybrid.nio.extensions", diff --git a/server/src/main/java/org/opensearch/index/codec/CodecService.java b/server/src/main/java/org/opensearch/index/codec/CodecService.java index 775fc88b504f5..67f38536a0d11 100644 --- a/server/src/main/java/org/opensearch/index/codec/CodecService.java +++ b/server/src/main/java/org/opensearch/index/codec/CodecService.java @@ -34,8 +34,8 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; -import org.apache.lucene.codecs.lucene95.Lucene95Codec.Mode; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec.Mode; import org.opensearch.common.Nullable; import org.opensearch.common.collect.MapBuilder; import org.opensearch.index.IndexSettings; @@ -68,10 +68,10 @@ public CodecService(@Nullable MapperService mapperService, IndexSettings indexSe final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder(); assert null != indexSettings; if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene95Codec()); - codecs.put(LZ4, new Lucene95Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene95Codec(Mode.BEST_COMPRESSION)); - codecs.put(ZLIB, new Lucene95Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene99Codec()); + codecs.put(LZ4, new Lucene99Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene99Codec(Mode.BEST_COMPRESSION)); + codecs.put(ZLIB, new Lucene99Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); codecs.put(LZ4, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index d3207557273a5..dc28ad2d6dc07 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -37,7 +37,7 @@ import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.common.lucene.Lucene; import org.opensearch.index.mapper.CompletionFieldMapper; import org.opensearch.index.mapper.MappedFieldType; @@ -53,7 +53,7 @@ * * @opensearch.internal */ -public class PerFieldMappingPostingFormatCodec extends Lucene95Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene99Codec { private final Logger logger; private final MapperService mapperService; private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); diff --git a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java index 6a79679cc2f09..a63e1d418e3ba 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/IndexFieldData.java @@ -38,6 +38,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparatorSource; import 
org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -150,6 +151,13 @@ public void disableSkipping() { this.enableSkipping = false; } + protected Pruning filterPruning(Pruning pruning) { + if (this.enableSkipping) { + return pruning; + } + return Pruning.NONE; + } + /** * Simple wrapper class around a filter that matches parent documents * and a filter that matches child documents. For every root document R, diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 3aa4d9cb782ca..4c6eba2f0c7ec 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -38,6 +38,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.TermOrdValComparator; @@ -94,7 +95,7 @@ protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOEx protected void setScorer(Scorable scorer, LeafReaderContext context) {} @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final boolean sortMissingLast = sortMissingLast(missingValue) ^ reversed; @@ -105,7 +106,7 @@ public FieldComparator<?> newComparator(String fieldname, int numHits, boolean e indexFieldData.getFieldName(), sortMissingLast, reversed, - enableSkipping + filterPruning(pruning) ) { @Override diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 4f9f05addc8c7..fd4fac3877f2f 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; @@ -98,11 +99,11 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, doubl protected void setScorer(Scorable scorer, LeafReaderContext context) {} @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final double dMissingValue = (Double) 
missingObject(missingValue, reversed); - return new DoubleComparator(numHits, fieldname, dMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new DoubleComparator(numHits, fieldname, dMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 3fb8b5c6ce393..b106ba268bcb9 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.FloatComparator; @@ -91,11 +92,11 @@ private NumericDoubleValues getNumericDocValues(LeafReaderContext context, float } @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float fMissingValue = (Float) missingObject(missingValue, reversed); - return new FloatComparator(numHits, fieldname, fMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new FloatComparator(numHits, fieldname, fMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new FloatLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java index e0227083f4181..e2e56dcb14fdf 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/HalfFloatValuesComparatorSource.java @@ -13,6 +13,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.util.BitSet; import org.opensearch.index.fielddata.FieldData; import org.opensearch.index.fielddata.IndexNumericFieldData; @@ -42,11 +43,11 @@ public HalfFloatValuesComparatorSource( } @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final float fMissingValue = (Float) missingObject(missingValue, reversed); - return new HalfFloatComparator(numHits, fieldname, fMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new 
HalfFloatComparator(numHits, fieldname, fMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new HalfFloatLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java index 3a3fdb85c3415..8f540cc6ae9d9 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/IntValuesComparatorSource.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.IntComparator; import org.apache.lucene.util.BitSet; @@ -70,11 +71,11 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, int miss } @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final int iMissingValue = (Integer) missingObject(missingValue, reversed); - return new IntComparator(numHits, fieldname, iMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new IntComparator(numHits, fieldname, iMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new IntLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 57568aa84fa5c..3666cd8d6dfea 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -37,6 +37,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.LongComparator; import org.apache.lucene.util.BitSet; @@ -114,11 +115,11 @@ private NumericDocValues getNumericDocValues(LeafReaderContext context, long mis } @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final long lMissingValue = (Long) missingObject(missingValue, reversed); - return new LongComparator(numHits, fieldname, lMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new LongComparator(numHits, fieldname, lMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new 
LongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java index 9d1edde1fe08f..3714561b63e44 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java +++ b/server/src/main/java/org/opensearch/index/fielddata/fieldcomparator/UnsignedLongValuesComparatorSource.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; import org.opensearch.common.Nullable; @@ -88,11 +89,11 @@ public Object missingObject(Object missingValue, boolean reversed) { } @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName()); final BigInteger ulMissingValue = (BigInteger) missingObject(missingValue, reversed); - return new UnsignedLongComparator(numHits, fieldname, ulMissingValue, reversed, enableSkipping && this.enableSkipping) { + return new UnsignedLongComparator(numHits, fieldname, ulMissingValue, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new UnsignedLongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java index e1413fd9b4bbe..a9d9f6cb35fcb 100644 --- a/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/CompletionFieldMapper.java @@ -35,7 +35,7 @@ import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.Term; -import org.apache.lucene.search.suggest.document.Completion90PostingsFormat; +import org.apache.lucene.search.suggest.document.Completion99PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionAnalyzer; import org.apache.lucene.search.suggest.document.CompletionQuery; import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery; @@ -330,7 +330,7 @@ public ContextMappings getContextMappings() { */ public static synchronized PostingsFormat postingsFormat() { if (postingsFormat == null) { - postingsFormat = new Completion90PostingsFormat(); + postingsFormat = new Completion99PostingsFormat(); } return postingsFormat; } diff --git a/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java b/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java index 6244fa647b042..b2e2ba8001b88 100644 --- a/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java +++ b/server/src/main/java/org/opensearch/index/search/comparators/HalfFloatComparator.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; 
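The recurring comparator edit in this patch is mechanical: Lucene 9.9 replaces the boolean enableSkipping parameter with a Pruning enum, and Pruning.NONE plays the role of the old false (the new filterPruning helper downgrades to it whenever skipping is disabled). A sketch of the pattern with a custom long-sorting SortField; the field name and missing value are illustrative:

import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Pruning;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.comparators.LongComparator;

final class NoPruningLongSortField extends SortField {
    NoPruningLongSortField(String field, boolean reverse) {
        super(field, Type.LONG, reverse);
    }

    @Override
    public FieldComparator<?> getComparator(int numHits, Pruning pruning) {
        // Equivalent of the old enableSkipping=false: never let the comparator
        // skip documents, whatever pruning level the collector offers.
        return new LongComparator(numHits, getField(), 0L, getReverse(), Pruning.NONE);
    }
}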
import org.apache.lucene.search.comparators.NumericComparator; import java.io.IOException; @@ -25,8 +26,8 @@ public class HalfFloatComparator extends NumericComparator<Float> { protected float topValue; protected float bottom; - public HalfFloatComparator(int numHits, String field, Float missingValue, boolean reverse, boolean enableSkipping) { - super(field, missingValue != null ? missingValue : 0.0f, reverse, enableSkipping, HalfFloatPoint.BYTES); + public HalfFloatComparator(int numHits, String field, Float missingValue, boolean reverse, Pruning pruning) { + super(field, missingValue != null ? missingValue : 0.0f, reverse, pruning, HalfFloatPoint.BYTES); values = new float[numHits]; } diff --git a/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java b/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java index d46b34fe97356..2b6bd9933e553 100644 --- a/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java +++ b/server/src/main/java/org/opensearch/index/search/comparators/UnsignedLongComparator.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.sandbox.document.BigIntegerPoint; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.comparators.NumericComparator; import org.opensearch.common.Numbers; @@ -23,8 +24,8 @@ public class UnsignedLongComparator extends NumericComparator<BigInteger> { protected BigInteger topValue; protected BigInteger bottom; - public UnsignedLongComparator(int numHits, String field, BigInteger missingValue, boolean reverse, boolean enableSkipping) { - super(field, missingValue != null ? missingValue : Numbers.MIN_UNSIGNED_LONG_VALUE, reverse, enableSkipping, BigIntegerPoint.BYTES); + public UnsignedLongComparator(int numHits, String field, BigInteger missingValue, boolean reverse, Pruning pruning) { + super(field, missingValue != null ? 
missingValue : Numbers.MIN_UNSIGNED_LONG_VALUE, reverse, pruning, BigIntegerPoint.BYTES); values = new BigInteger[numHits]; } diff --git a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java index e417a2eaa7cf4..600ba5b5a92d8 100644 --- a/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/opensearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -40,6 +40,7 @@ import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; @@ -77,7 +78,7 @@ public SearchAfterSortedDocQuery(Sort sort, FieldDoc after) { this.reverseMuls = new int[numFields]; for (int i = 0; i < numFields; i++) { SortField sortField = sort.getSort()[i]; - FieldComparator<?> fieldComparator = sortField.getComparator(1, false); + FieldComparator<?> fieldComparator = sortField.getComparator(1, Pruning.NONE); @SuppressWarnings("unchecked") FieldComparator<Object> comparator = (FieldComparator<Object>) fieldComparator; comparator.setTopValue(after.fields[i]); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java index 2e7c4659bcb00..223be3ba2d1ae 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/BestBucketsDeferringCollector.java @@ -90,7 +90,7 @@ static class Entry { protected PackedLongValues.Builder docDeltasBuilder; protected PackedLongValues.Builder bucketsBuilder; protected long maxBucket = -1; - protected boolean finished = false; + protected boolean finished; protected LongHash selectedBuckets; /** @@ -101,6 +101,9 @@ static class Entry { public BestBucketsDeferringCollector(SearchContext context, boolean isGlobal) { this.searchContext = context; this.isGlobal = isGlobal; + // a postCollection call is not made by the IndexSearcher when there are no segments. + // In this case init the collector as finished. 
+ this.finished = context.searcher().getLeafContexts().isEmpty(); } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index f2a4d5cd46127..317c2a357bac5 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -45,6 +45,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; @@ -354,8 +355,8 @@ public int hashCode() { } @Override - public FieldComparator<?> getComparator(int numHits, boolean enableSkipping) { - return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), false) { + public FieldComparator<?> getComparator(int numHits, Pruning pruning) { + return new LongComparator(1, delegate.getField(), (Long) missingValue, delegate.getReverse(), Pruning.NONE) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new LongLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java index b042f3cf41d61..403b0b545c113 100644 --- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java @@ -102,6 +102,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { * The interval at which we check for search cancellation when we cannot use * a {@link CancellableBulkScorer}. See {@link #intersectScorerAndBitSet}. 
*/ + private static final int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11; private AggregatedDfs aggregatedDfs; diff --git a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java index c4bb54de2eedd..0499bba3245c6 100644 --- a/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java +++ b/server/src/main/java/org/opensearch/search/sort/GeoDistanceSortBuilder.java @@ -38,6 +38,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.comparators.DoubleComparator; import org.apache.lucene.util.BitSet; @@ -734,8 +735,8 @@ private NumericDoubleValues getNumericDoubleValues(LeafReaderContext context) th } @Override - public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) { - return new DoubleComparator(numHits, null, null, reversed, enableSkipping) { + public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) { + return new DoubleComparator(numHits, null, null, reversed, filterPruning(pruning)) { @Override public LeafFieldComparator getLeafComparator(LeafReaderContext context) throws IOException { return new DoubleLeafComparator(context) { diff --git a/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java b/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java index bded2417ba6c1..10cc832fdb684 100644 --- a/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java +++ b/server/src/main/java/org/opensearch/search/sort/SortedWiderNumericSortField.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.LeafFieldComparator; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.comparators.NumericComparator; @@ -44,13 +45,13 @@ public SortedWiderNumericSortField(String field, Type type, boolean reverse) { * Creates and return a comparator, which always converts Numeric to double * and compare to support multi type comparison between numeric values * @param numHits number of top hits the queue will store - * @param enableSkipping true if the comparator can skip documents via {@link + * @param pruning controls how the comparator skips documents via {@link * LeafFieldComparator#competitiveIterator()} * @return NumericComparator */ @Override - public FieldComparator<?> getComparator(int numHits, boolean enableSkipping) { - return new NumericComparator<Number>(getField(), (Number) getMissingValue(), getReverse(), enableSkipping, Double.BYTES) { + public FieldComparator<?> getComparator(int numHits, Pruning pruning) { + return new NumericComparator<Number>(getField(), (Number) getMissingValue(), getReverse(), pruning, Double.BYTES) { @Override public int compare(int slot1, int slot2) { throw new UnsupportedOperationException(); diff --git a/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java b/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java index 8042a7e296869..4f929a71429a6 100644 --- 
a/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java +++ b/server/src/test/java/org/opensearch/action/search/BottomSortValuesCollectorTests.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldDoc; +import org.apache.lucene.search.Pruning; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TotalHits; @@ -264,7 +265,7 @@ private Object[] newDateNanoArray(String... values) { private TopFieldDocs createTopDocs(SortField sortField, int totalHits, Object[] values) { FieldDoc[] fieldDocs = new FieldDoc[values.length]; - FieldComparator cmp = sortField.getComparator(1, false); + FieldComparator cmp = sortField.getComparator(1, Pruning.NONE); for (int i = 0; i < values.length; i++) { fieldDocs[i] = new FieldDoc(i, Float.NaN, new Object[] { values[i] }); } diff --git a/server/src/test/java/org/opensearch/index/codec/CodecTests.java b/server/src/test/java/org/opensearch/index/codec/CodecTests.java index 6f9ff78aa0143..b31edd79411d0 100644 --- a/server/src/test/java/org/opensearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/opensearch/index/codec/CodecTests.java @@ -35,7 +35,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene90.Lucene90StoredFieldsFormat; -import org.apache.lucene.codecs.lucene95.Lucene95Codec; +import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -68,28 +68,28 @@ public class CodecTests extends OpenSearchTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(false); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene95Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Lucene99Codec.class)); } public void testDefault() throws Exception { Codec codec = createCodecService(false).codec("default"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); } public void testBestCompression() throws Exception { Codec codec = createCodecService(false).codec("best_compression"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); } public void testLZ4() throws Exception { Codec codec = createCodecService(false).codec("lz4"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_SPEED, codec); assert codec instanceof PerFieldMappingPostingFormatCodec; } public void testZlib() throws Exception { Codec codec = createCodecService(false).codec("zlib"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_COMPRESSION, codec); + assertStoredFieldsCompressionEquals(Lucene99Codec.Mode.BEST_COMPRESSION, codec); assert codec instanceof PerFieldMappingPostingFormatCodec; } @@ -125,12 +125,12 @@ public void testLuceneCodecsWithCompressionLevel() { public void testDefaultMapperServiceNull() throws Exception { Codec codec = createCodecService(true).codec("default"); - assertStoredFieldsCompressionEquals(Lucene95Codec.Mode.BEST_SPEED, 
diff --git a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java
index b746d0ba8a56d..0c87c384e0749 100644
--- a/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java
+++ b/server/src/test/java/org/opensearch/index/engine/CompletionStatsCacheTests.java
@@ -32,14 +32,14 @@
 package org.opensearch.index.engine;
 
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene95.Lucene95Codec;
+import org.apache.lucene.codecs.lucene99.Lucene99Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryCachingPolicy;
-import org.apache.lucene.search.suggest.document.Completion90PostingsFormat;
+import org.apache.lucene.search.suggest.document.Completion99PostingsFormat;
 import org.apache.lucene.search.suggest.document.SuggestField;
 import org.apache.lucene.store.Directory;
 import org.opensearch.OpenSearchException;
@@ -69,8 +69,8 @@ public void testExceptionsAreNotCached() {
 
     public void testCompletionStatsCache() throws IOException, InterruptedException {
         final IndexWriterConfig indexWriterConfig = newIndexWriterConfig();
-        final PostingsFormat postingsFormat = new Completion90PostingsFormat();
-        indexWriterConfig.setCodec(new Lucene95Codec() {
+        final PostingsFormat postingsFormat = new Completion99PostingsFormat();
+        indexWriterConfig.setCodec(new Lucene99Codec() {
             @Override
             public PostingsFormat getPostingsFormatForField(String field) {
                 return postingsFormat; // all fields are suggest fields
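The hunk above pairs the codec bump with the matching suggester postings format, since completion fields need a `CompletionXXPostingsFormat` from the same codec generation. A sketch of the same per-field override outside a test, where the `_suggest` suffix convention is an assumption for illustration only:

    import org.apache.lucene.codecs.PostingsFormat;
    import org.apache.lucene.codecs.lucene99.Lucene99Codec;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.search.suggest.document.Completion99PostingsFormat;

    class SuggestCodecSketch {
        // Route fields named like suggest fields to the completion postings
        // format; every other field keeps the codec's default format.
        static IndexWriterConfig suggestAwareConfig() {
            PostingsFormat completions = new Completion99PostingsFormat();
            IndexWriterConfig config = new IndexWriterConfig();
            config.setCodec(new Lucene99Codec() {
                @Override
                public PostingsFormat getPostingsFormatForField(String field) {
                    return field.endsWith("_suggest") ? completions : super.getPostingsFormatForField(field);
                }
            });
            return config;
        }
    }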
diff --git a/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java
index 755d77c6ae392..00b48240d0567 100644
--- a/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/RangeFieldTypeTests.java
@@ -277,12 +277,12 @@ public void testDateRangeQueryUsingMappingFormat() {
         RangeFieldType fieldType = new RangeFieldType("field", formatter);
         final Query query = fieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context);
-        assertEquals("field:<ranges:[1465975790000 : 1466062190999]>", query.toString());
+        assertEquals("field:<ranges:[1465975790000 : 1466062190999]>", ((IndexOrDocValuesQuery) query).getIndexQuery().toString());
 
         // compare lower and upper bounds with what we would get on a `date` field
         DateFieldType dateFieldType = new DateFieldType("field", DateFieldMapper.Resolution.MILLISECONDS, formatter);
         final Query queryOnDateField = dateFieldType.rangeQuery(from, to, true, true, relation, null, fieldType.dateMathParser(), context);
-        assertEquals("field:[1465975790000 TO 1466062190999]", queryOnDateField.toString());
+        assertEquals("field:[1465975790000 TO 1466062190999]", ((IndexOrDocValuesQuery) queryOnDateField).getIndexQuery().toString());
     }
 
     /**
diff --git a/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java
index 1b62fef30d255..0dfe47e83fedc 100644
--- a/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java
+++ b/server/src/test/java/org/opensearch/index/query/GeoDistanceQueryBuilderTests.java
@@ -358,7 +358,10 @@ private void assertGeoDistanceRangeQuery(String query, double lat, double lon, d
         Query parsedQuery = parseQuery(query).toQuery(createShardContext());
         // The parsedQuery contains IndexOrDocValuesQuery, which wraps LatLonPointDistanceQuery which in turn has default visibility,
         // so we cannot access its fields directly to check and have to use toString() here instead.
-        assertEquals(parsedQuery.toString(), "mapped_geo_point:" + lat + "," + lon + " +/- " + distanceUnit.toMeters(distance) + " meters");
+        assertEquals(
+            ((IndexOrDocValuesQuery) parsedQuery).getIndexQuery().toString(),
+            "mapped_geo_point:" + lat + "," + lon + " +/- " + distanceUnit.toMeters(distance) + " meters"
+        );
     }
 
     public void testFromJson() throws IOException {
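Both test fixes above respond to the same underlying change: date-range and geo-distance queries now come back wrapped in an `IndexOrDocValuesQuery`, so assertions on `toString()` must first unwrap the points-based side via `getIndexQuery()`. A small helper of the kind these tests could share, hypothetical rather than part of the patch:

    import org.apache.lucene.search.IndexOrDocValuesQuery;
    import org.apache.lucene.search.Query;

    final class QueryUnwrapSketch {
        // Return the index (points) side of an IndexOrDocValuesQuery, or the
        // query itself when no wrapping happened.
        static Query unwrapIndexQuery(Query query) {
            return query instanceof IndexOrDocValuesQuery ? ((IndexOrDocValuesQuery) query).getIndexQuery() : query;
        }
    }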
diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java
index 1ae201666bc54..d68b0911d3d01 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/AggregationProcessorTests.java
@@ -8,10 +8,13 @@
 package org.opensearch.search.aggregations;
 
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.CollectorManager;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
+import org.opensearch.index.engine.Engine;
 import org.opensearch.search.aggregations.bucket.global.GlobalAggregator;
 import org.opensearch.search.internal.ContextIndexSearcher;
 import org.opensearch.search.profile.query.CollectorResult;
@@ -20,14 +23,17 @@
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.ExecutorService;
 
 import org.mockito.ArgumentMatchers;
 
 import static org.mockito.ArgumentMatchers.nullable;
+import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
 
 public class AggregationProcessorTests extends AggregationSetupTests {
     private final AggregationProcessor testAggregationProcessor = new ConcurrentAggregationProcessor();
@@ -152,9 +158,31 @@ private void testPostProcessCommon(
                 globalCollectors.add(context.queryCollectorManagers().get(GlobalAggCollectorManager.class).newCollector());
             }
         }
-        final ContextIndexSearcher testSearcher = mock(ContextIndexSearcher.class);
         final IndexSearcher.LeafSlice[] slicesToReturn = new IndexSearcher.LeafSlice[numSlices];
-        when(testSearcher.getSlices()).thenReturn(slicesToReturn);
+
+        // Build a ContextIndexSearcher that stubs slices() to return slicesToReturn. slices() is protected in IndexSearcher,
+        // so this builds a real object. The DirectoryReader fetched to build the object is not used for any searches.
+        final DirectoryReader reader;
+        try (Engine.Searcher searcher = context.indexShard().acquireSearcher("test")) {
+            reader = searcher.getDirectoryReader();
+        }
+        ContextIndexSearcher testSearcher = spy(
+            new ContextIndexSearcher(
+                reader,
+                IndexSearcher.getDefaultSimilarity(),
+                IndexSearcher.getDefaultQueryCache(),
+                IndexSearcher.getDefaultQueryCachingPolicy(),
+                randomBoolean(),
+                mock(ExecutorService.class),
+                context
+            ) {
+                @Override
+                protected LeafSlice[] slices(List<LeafReaderContext> leaves) {
+                    return slicesToReturn;
+                }
+            }
+        );
+        ((TestSearchContext) context).setSearcher(testSearcher);
 
         AggregationCollectorManager collectorManager;
         if (expectedNonGlobalAggsPerSlice > 0) {
@@ -164,8 +192,8 @@ private void testPostProcessCommon(
         if (expectedGlobalAggs > 0) {
             collectorManager = (AggregationCollectorManager) context.queryCollectorManagers().get(GlobalAggCollectorManager.class);
             ReduceableSearchResult result = collectorManager.reduce(globalCollectors);
-            when(testSearcher.search(nullable(Query.class), ArgumentMatchers.<CollectorManager<?, ReduceableSearchResult>>any()))
-                .thenReturn(result);
+            doReturn(result).when(testSearcher)
+                .search(nullable(Query.class), ArgumentMatchers.<CollectorManager<?, ReduceableSearchResult>>any());
         }
         assertTrue(context.queryResult().hasAggs());
         if (withProfilers) {
diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java
index 9358fa568db6a..01e259f84660d 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/InternalTopHitsTests.java
@@ -35,6 +35,7 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.FieldDoc;
+import org.apache.lucene.search.Pruning;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TopDocs;
@@ -356,7 +357,7 @@ private Comparator<ScoreDoc> sortFieldsComparator(SortField[] sortFields) {
         FieldComparator[] comparators = new FieldComparator[sortFields.length];
         for (int i = 0; i < sortFields.length; i++) {
             // Values passed to getComparator shouldn't matter
-            comparators[i] = sortFields[i].getComparator(0, false);
+            comparators[i] = sortFields[i].getComparator(0, Pruning.NONE);
         }
         return (lhs, rhs) -> {
             FieldDoc l = (FieldDoc) lhs;
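Two details in the rewritten test above are easy to miss: `slices()` can only be stubbed by subclassing because it is a protected method, and the stubbing switches from `when(...).thenReturn(...)` to `doReturn(...).when(...)` because the searcher is now a Mockito spy, where `when(...)` would invoke the real `search()` while recording the stub. A toy illustration of that spy-stubbing pattern, unrelated to the OpenSearch classes:

    import static org.mockito.Mockito.doReturn;
    import static org.mockito.Mockito.spy;

    import java.util.ArrayList;
    import java.util.List;

    class SpyStubbingSketch {
        static List<String> stubbedHead() {
            List<String> spied = spy(new ArrayList<>(List.of("a", "b")));
            // when(spied.subList(0, 1)).thenReturn(...) would call the real
            // subList during stubbing; doReturn records it without doing so.
            doReturn(List.of("stubbed")).when(spied).subList(0, 1);
            return spied.subList(0, 1);
        }
    }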
diff --git a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
index b1f70dfce176c..a707c8b34e0a4 100644
--- a/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
+++ b/server/src/test/java/org/opensearch/search/internal/ContextIndexSearcherTests.java
@@ -393,8 +393,8 @@ public void testGetSlicesWithNonNullExecutorButCSDisabled() throws Exception {
             null,
             searchContext
         );
-        // Case 1: Verify getSlices return null when concurrent segment search is disabled
-        assertNull(searcher.getSlices());
+        // Case 1: Verify getSlices returns a non-null result when concurrent segment search is disabled
+        assertEquals(1, searcher.getSlices().length);
 
         // Case 2: Verify the slice count when custom max slice computation is used
         searcher = new ContextIndexSearcher(
diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
index 39126a607f968..d0e01c5461c79 100644
--- a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
+++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
@@ -69,6 +69,7 @@
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.search.Pruning;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.Sort;
@@ -659,7 +660,7 @@ public void testIndexSortScrollOptimization() throws Exception {
             @SuppressWarnings("unchecked")
             FieldComparator<Object> comparator = (FieldComparator<Object>) searchSortAndFormat.sort.getSort()[i].getComparator(
                 1,
-                false
+                Pruning.NONE
             );
             int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]);
             if (cmp == 0) {
diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java
index 48ac2d3b5a804..6af04e15acef0 100644
--- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java
+++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java
@@ -35,6 +35,7 @@
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.Pruning;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
@@ -1086,7 +1087,7 @@ public void testIndexSortScrollOptimization() throws Exception {
             @SuppressWarnings("unchecked")
             FieldComparator<Object> comparator = (FieldComparator<Object>) searchSortAndFormat.sort.getSort()[i].getComparator(
                 i,
-                randomBoolean()
+                randomFrom(Pruning.values())
             );
             int cmp = comparator.compareValues(firstDoc.fields[i], lastDoc.fields[i]);
             if (cmp == 0) {
diff --git a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java
index cc5c205f7f9fe..53c07674f5bd7 100644
--- a/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java
+++ b/server/src/test/java/org/opensearch/search/searchafter/SearchAfterBuilderTests.java
@@ -34,6 +34,7 @@
 import org.apache.lucene.document.LatLonDocValuesField;
 import org.apache.lucene.search.FieldComparator;
+import org.apache.lucene.search.Pruning;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.SortedNumericSortField;
 import org.apache.lucene.search.SortedSetSortField;
@@ -279,7 +280,7 @@ public SortField.Type reducedType() {
             }
 
             @Override
-            public FieldComparator<?> newComparator(String fieldname, int numHits, boolean enableSkipping, boolean reversed) {
+            public FieldComparator<?> newComparator(String fieldname, int numHits, Pruning pruning, boolean reversed) {
                 return null;
             }
 

From 439573b36bc82ddb77d37bfa3d0cd6f85c20c0e2 Mon Sep 17 00:00:00 2001
From: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>
Date: Wed, 10 Jan 2024 05:49:11 -0800
Subject: [PATCH 073/178] Update skip version to match branches (#11801)

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

From 81924884053f76325389f80118d4876554a6f94a Mon Sep 17 00:00:00 2001
From: Cam <17013462+camerondurham@users.noreply.github.com>
Date: Wed, 10 Jan 2024 06:54:41 -0700
Subject: [PATCH 074/178] Single line comments can end in newline or EOF.
 Fixes #11815 (#11816)

* Single line comments can end in newline or EOF. Fixes #11815

Signed-off-by: camerondurham <u64.cam@gmail.com>

* Re-add license to autogen parser files

Signed-off-by: camerondurham <u64.cam@gmail.com>

* Remove noise from changelog

Signed-off-by: Cameron Durham <u64.cam@gmail.com>

* Accept single-line comments until newline char

Signed-off-by: Cameron Durham <u64.cam@gmail.com>

---------

Signed-off-by: camerondurham <u64.cam@gmail.com>
Signed-off-by: Cameron Durham <u64.cam@gmail.com>
---
 CHANGELOG.md                                  |   1 +
 .../src/main/antlr/PainlessLexer.g4           |   2 +-
 .../painless/antlr/PainlessLexer.java         | 748 +++++++++---------
 .../org/opensearch/painless/CommentTests.java |  51 ++
 4 files changed, 427 insertions(+), 375 deletions(-)
 create mode 100644 modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8e0317c6d5475..1e00e42e5b756 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -218,6 +218,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609))
 - Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711))
 - Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490))
+- Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815))
 
 ### Security
 
diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4
index 21b03b85d8edd..69b789dd2aa25 100644
--- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4
+++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4
@@ -25,7 +25,7 @@ protected abstract boolean isSlashRegex();
 }
 
 WS: [ \t\n\r]+ -> skip;
-COMMENT: ( '//' .*? [\n\r] | '/*' .*? '*/' ) -> skip;
+COMMENT: ( '//' ~[\n\r]* | '/*' .*? 
'*/' ) -> skip; LBRACK: '{'; RBRACK: '}'; diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java index fb33cf6e2a6f5..260a2fc0c062c 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/PainlessLexer.java @@ -435,7 +435,7 @@ private boolean REGEX_sempred(RuleContext _localctx, int predIndex) { return true; } - public static final String _serializedATN = "\u0004\u0000U\u0278\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007" + public static final String _serializedATN = "\u0004\u0000U\u0277\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007" + "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007" + "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007" + "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n" @@ -459,381 +459,381 @@ private boolean REGEX_sempred(RuleContext _localctx, int predIndex) { + "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0001\u0000\u0004\u0000\u00ae\b\u0000" + "\u000b\u0000\f\u0000\u00af\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001" + "\u0001\u0001\u0001\u0001\u0005\u0001\u00b8\b\u0001\n\u0001\f\u0001\u00bb" - + "\t\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005" - + "\u0001\u00c2\b\u0001\n\u0001\f\u0001\u00c5\t\u0001\u0001\u0001\u0001\u0001" - + "\u0003\u0001\u00c9\b\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002" - + "\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005" - + "\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001" - + "\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001" - + "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001" - + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001" - + "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001" - + "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001" - + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001" - + "\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001" - + "\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001" - + "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001" - + "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001" - + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001" - + "\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001" - + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001" - + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001" - + "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001" - + "\u001c\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001" - + "\u001e\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001" - + "\"\u0001\"\u0001#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001" - + "%\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001" - + ")\u0001)\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001" - + ",\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u00010\u0001" - + "0\u00010\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00014\u0001" - + 
"4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u0001" - + "7\u00018\u00018\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001" - + ":\u0001;\u0001;\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001" - + ">\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001" - + "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001" - + "D\u0001E\u0001E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001F\u0001F\u0001" - + "G\u0001G\u0004G\u01b8\bG\u000bG\fG\u01b9\u0001G\u0003G\u01bd\bG\u0001" - + "H\u0001H\u0001H\u0004H\u01c2\bH\u000bH\fH\u01c3\u0001H\u0003H\u01c7\b" - + "H\u0001I\u0001I\u0001I\u0005I\u01cc\bI\nI\fI\u01cf\tI\u0003I\u01d1\bI" - + "\u0001I\u0003I\u01d4\bI\u0001J\u0001J\u0001J\u0005J\u01d9\bJ\nJ\fJ\u01dc" - + "\tJ\u0003J\u01de\bJ\u0001J\u0001J\u0004J\u01e2\bJ\u000bJ\fJ\u01e3\u0003" - + "J\u01e6\bJ\u0001J\u0001J\u0003J\u01ea\bJ\u0001J\u0004J\u01ed\bJ\u000b" - + "J\fJ\u01ee\u0003J\u01f1\bJ\u0001J\u0003J\u01f4\bJ\u0001K\u0001K\u0001" - + "K\u0001K\u0001K\u0001K\u0005K\u01fc\bK\nK\fK\u01ff\tK\u0001K\u0001K\u0001" - + "K\u0001K\u0001K\u0001K\u0001K\u0005K\u0208\bK\nK\fK\u020b\tK\u0001K\u0003" - + "K\u020e\bK\u0001L\u0001L\u0001L\u0001L\u0004L\u0214\bL\u000bL\fL\u0215" - + "\u0001L\u0001L\u0005L\u021a\bL\nL\fL\u021d\tL\u0001L\u0001L\u0001M\u0001" - + "M\u0001M\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001N\u0001" - + "O\u0001O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" + + "\t\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u00c1" + + "\b\u0001\n\u0001\f\u0001\u00c4\t\u0001\u0001\u0001\u0001\u0001\u0003\u0001" + + "\u00c8\b\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0003" + + "\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0006" + + "\u0001\u0006\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001" + + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001\u000b\u0001\u000b" + + "\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e" + + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f" + + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010" + + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012" + + "\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012" + + "\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013" + + "\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014" + + "\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015" + + "\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017" + + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018" + + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019" + + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a" + + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a" + + "\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c" + + "\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f" + + "\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0001" + + "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0001%\u0001%\u0001&\u0001" + + "&\u0001&\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001" + + "*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001" + + ",\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00010\u0001" + + 
"1\u00011\u00011\u00012\u00012\u00013\u00013\u00014\u00014\u00014\u0001" + + "5\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u00017\u00018\u0001" + + "8\u00018\u00018\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001" + + ";\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001>\u0001" + + "?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001A\u0001A\u0001B\u0001" + + "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001D\u0001E\u0001" + + "E\u0001E\u0001E\u0001F\u0001F\u0001F\u0001F\u0001F\u0001G\u0001G\u0004" + + "G\u01b7\bG\u000bG\fG\u01b8\u0001G\u0003G\u01bc\bG\u0001H\u0001H\u0001" + + "H\u0004H\u01c1\bH\u000bH\fH\u01c2\u0001H\u0003H\u01c6\bH\u0001I\u0001" + + "I\u0001I\u0005I\u01cb\bI\nI\fI\u01ce\tI\u0003I\u01d0\bI\u0001I\u0003I" + + "\u01d3\bI\u0001J\u0001J\u0001J\u0005J\u01d8\bJ\nJ\fJ\u01db\tJ\u0003J\u01dd" + + "\bJ\u0001J\u0001J\u0004J\u01e1\bJ\u000bJ\fJ\u01e2\u0003J\u01e5\bJ\u0001" + + "J\u0001J\u0003J\u01e9\bJ\u0001J\u0004J\u01ec\bJ\u000bJ\fJ\u01ed\u0003" + + "J\u01f0\bJ\u0001J\u0003J\u01f3\bJ\u0001K\u0001K\u0001K\u0001K\u0001K\u0001" + + "K\u0005K\u01fb\bK\nK\fK\u01fe\tK\u0001K\u0001K\u0001K\u0001K\u0001K\u0001" + + "K\u0001K\u0005K\u0207\bK\nK\fK\u020a\tK\u0001K\u0003K\u020d\bK\u0001L" + + "\u0001L\u0001L\u0001L\u0004L\u0213\bL\u000bL\fL\u0214\u0001L\u0001L\u0005" + + "L\u0219\bL\nL\fL\u021c\tL\u0001L\u0001L\u0001M\u0001M\u0001M\u0001M\u0001" + + "M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001" + + "O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" - + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001" - + "P\u0001P\u0001P\u0003P\u0257\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001" - + "R\u0005R\u025f\bR\nR\fR\u0262\tR\u0001S\u0001S\u0001S\u0005S\u0267\bS" - + "\nS\fS\u026a\tS\u0003S\u026c\bS\u0001S\u0001S\u0001T\u0001T\u0005T\u0272" - + "\bT\nT\fT\u0275\tT\u0001T\u0001T\u0005\u00b9\u00c3\u01fd\u0209\u0215\u0000" - + "U\u0002\u0001\u0004\u0002\u0006\u0003\b\u0004\n\u0005\f\u0006\u000e\u0007" - + "\u0010\b\u0012\t\u0014\n\u0016\u000b\u0018\f\u001a\r\u001c\u000e\u001e" - + "\u000f \u0010\"\u0011$\u0012&\u0013(\u0014*\u0015,\u0016.\u00170\u0018" - + "2\u00194\u001a6\u001b8\u001c:\u001d<\u001e>\u001f@ B!D\"F#H$J%L&N\'P(" - + "R)T*V+X,Z-\\.^/`0b1d2f3h4j5l6n7p8r9t:v;x<z=|>~?\u0080@\u0082A\u0084B\u0086" - + "C\u0088D\u008aE\u008cF\u008eG\u0090H\u0092I\u0094J\u0096K\u0098L\u009a" - + "M\u009cN\u009eO\u00a0P\u00a2Q\u00a4R\u00a6S\u00a8T\u00aaU\u0002\u0000" - + "\u0001\u0013\u0003\u0000\t\n\r\r \u0002\u0000\n\n\r\r\u0001\u000007\u0002" - + "\u0000LLll\u0002\u0000XXxx\u0003\u000009AFaf\u0001\u000019\u0001\u0000" - + "09\u0006\u0000DDFFLLddffll\u0002\u0000EEee\u0002\u0000++--\u0004\u0000" - + "DDFFddff\u0002\u0000\"\"\\\\\u0002\u0000\'\'\\\\\u0001\u0000\n\n\u0002" - + "\u0000\n\n//\u0007\u0000UUcciilmssuuxx\u0003\u0000AZ__az\u0004\u00000" - + "9AZ__az\u029e\u0000\u0002\u0001\u0000\u0000\u0000\u0000\u0004\u0001\u0000" - + "\u0000\u0000\u0000\u0006\u0001\u0000\u0000\u0000\u0000\b\u0001\u0000\u0000" - + "\u0000\u0000\n\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000" - + "\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000" - + "\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000" - + "\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000" - + 
"\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000" - + "\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000" - + "\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001" - + "\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000" - + "\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u0000" - + "0\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001" - + "\u0000\u0000\u0000\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000" - + "\u0000\u0000:\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000" - + ">\u0001\u0000\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001" - + "\u0000\u0000\u0000\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000" - + "\u0000\u0000H\u0001\u0000\u0000\u0000\u0000J\u0001\u0000\u0000\u0000\u0000" - + "L\u0001\u0000\u0000\u0000\u0000N\u0001\u0000\u0000\u0000\u0000P\u0001" - + "\u0000\u0000\u0000\u0000R\u0001\u0000\u0000\u0000\u0000T\u0001\u0000\u0000" - + "\u0000\u0000V\u0001\u0000\u0000\u0000\u0000X\u0001\u0000\u0000\u0000\u0000" - + "Z\u0001\u0000\u0000\u0000\u0000\\\u0001\u0000\u0000\u0000\u0000^\u0001" - + "\u0000\u0000\u0000\u0000`\u0001\u0000\u0000\u0000\u0000b\u0001\u0000\u0000" - + "\u0000\u0000d\u0001\u0000\u0000\u0000\u0000f\u0001\u0000\u0000\u0000\u0000" - + "h\u0001\u0000\u0000\u0000\u0000j\u0001\u0000\u0000\u0000\u0000l\u0001" - + "\u0000\u0000\u0000\u0000n\u0001\u0000\u0000\u0000\u0000p\u0001\u0000\u0000" - + "\u0000\u0000r\u0001\u0000\u0000\u0000\u0000t\u0001\u0000\u0000\u0000\u0000" - + "v\u0001\u0000\u0000\u0000\u0000x\u0001\u0000\u0000\u0000\u0000z\u0001" - + "\u0000\u0000\u0000\u0000|\u0001\u0000\u0000\u0000\u0000~\u0001\u0000\u0000" - + "\u0000\u0000\u0080\u0001\u0000\u0000\u0000\u0000\u0082\u0001\u0000\u0000" - + "\u0000\u0000\u0084\u0001\u0000\u0000\u0000\u0000\u0086\u0001\u0000\u0000" - + "\u0000\u0000\u0088\u0001\u0000\u0000\u0000\u0000\u008a\u0001\u0000\u0000" - + "\u0000\u0000\u008c\u0001\u0000\u0000\u0000\u0000\u008e\u0001\u0000\u0000" - + "\u0000\u0000\u0090\u0001\u0000\u0000\u0000\u0000\u0092\u0001\u0000\u0000" - + "\u0000\u0000\u0094\u0001\u0000\u0000\u0000\u0000\u0096\u0001\u0000\u0000" - + "\u0000\u0000\u0098\u0001\u0000\u0000\u0000\u0000\u009a\u0001\u0000\u0000" - + "\u0000\u0000\u009c\u0001\u0000\u0000\u0000\u0000\u009e\u0001\u0000\u0000" - + "\u0000\u0000\u00a0\u0001\u0000\u0000\u0000\u0000\u00a2\u0001\u0000\u0000" - + "\u0000\u0000\u00a4\u0001\u0000\u0000\u0000\u0000\u00a6\u0001\u0000\u0000" - + "\u0000\u0001\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000" - + "\u0000\u0002\u00ad\u0001\u0000\u0000\u0000\u0004\u00c8\u0001\u0000\u0000" - + "\u0000\u0006\u00cc\u0001\u0000\u0000\u0000\b\u00ce\u0001\u0000\u0000\u0000" - + "\n\u00d0\u0001\u0000\u0000\u0000\f\u00d2\u0001\u0000\u0000\u0000\u000e" - + "\u00d4\u0001\u0000\u0000\u0000\u0010\u00d6\u0001\u0000\u0000\u0000\u0012" - + "\u00d8\u0001\u0000\u0000\u0000\u0014\u00dc\u0001\u0000\u0000\u0000\u0016" - + "\u00e1\u0001\u0000\u0000\u0000\u0018\u00e3\u0001\u0000\u0000\u0000\u001a" - + "\u00e5\u0001\u0000\u0000\u0000\u001c\u00e8\u0001\u0000\u0000\u0000\u001e" - + "\u00eb\u0001\u0000\u0000\u0000 \u00f0\u0001\u0000\u0000\u0000\"\u00f6" - + "\u0001\u0000\u0000\u0000$\u00f9\u0001\u0000\u0000\u0000&\u00fd\u0001\u0000" - + "\u0000\u0000(\u0106\u0001\u0000\u0000\u0000*\u010c\u0001\u0000\u0000\u0000" - + ",\u0113\u0001\u0000\u0000\u0000.\u0117\u0001\u0000\u0000\u00000\u011b" - + 
"\u0001\u0000\u0000\u00002\u0121\u0001\u0000\u0000\u00004\u0127\u0001\u0000" - + "\u0000\u00006\u012c\u0001\u0000\u0000\u00008\u0137\u0001\u0000\u0000\u0000" - + ":\u0139\u0001\u0000\u0000\u0000<\u013b\u0001\u0000\u0000\u0000>\u013d" - + "\u0001\u0000\u0000\u0000@\u0140\u0001\u0000\u0000\u0000B\u0142\u0001\u0000" - + "\u0000\u0000D\u0144\u0001\u0000\u0000\u0000F\u0146\u0001\u0000\u0000\u0000" - + "H\u0149\u0001\u0000\u0000\u0000J\u014c\u0001\u0000\u0000\u0000L\u0150" - + "\u0001\u0000\u0000\u0000N\u0152\u0001\u0000\u0000\u0000P\u0155\u0001\u0000" - + "\u0000\u0000R\u0157\u0001\u0000\u0000\u0000T\u015a\u0001\u0000\u0000\u0000" - + "V\u015d\u0001\u0000\u0000\u0000X\u0161\u0001\u0000\u0000\u0000Z\u0164" - + "\u0001\u0000\u0000\u0000\\\u0168\u0001\u0000\u0000\u0000^\u016a\u0001" - + "\u0000\u0000\u0000`\u016c\u0001\u0000\u0000\u0000b\u016e\u0001\u0000\u0000" - + "\u0000d\u0171\u0001\u0000\u0000\u0000f\u0174\u0001\u0000\u0000\u0000h" - + "\u0176\u0001\u0000\u0000\u0000j\u0178\u0001\u0000\u0000\u0000l\u017b\u0001" - + "\u0000\u0000\u0000n\u017e\u0001\u0000\u0000\u0000p\u0181\u0001\u0000\u0000" - + "\u0000r\u0184\u0001\u0000\u0000\u0000t\u0188\u0001\u0000\u0000\u0000v" - + "\u018b\u0001\u0000\u0000\u0000x\u018e\u0001\u0000\u0000\u0000z\u0190\u0001" - + "\u0000\u0000\u0000|\u0193\u0001\u0000\u0000\u0000~\u0196\u0001\u0000\u0000" - + "\u0000\u0080\u0199\u0001\u0000\u0000\u0000\u0082\u019c\u0001\u0000\u0000" - + "\u0000\u0084\u019f\u0001\u0000\u0000\u0000\u0086\u01a2\u0001\u0000\u0000" - + "\u0000\u0088\u01a5\u0001\u0000\u0000\u0000\u008a\u01a8\u0001\u0000\u0000" - + "\u0000\u008c\u01ac\u0001\u0000\u0000\u0000\u008e\u01b0\u0001\u0000\u0000" - + "\u0000\u0090\u01b5\u0001\u0000\u0000\u0000\u0092\u01be\u0001\u0000\u0000" - + "\u0000\u0094\u01d0\u0001\u0000\u0000\u0000\u0096\u01dd\u0001\u0000\u0000" - + "\u0000\u0098\u020d\u0001\u0000\u0000\u0000\u009a\u020f\u0001\u0000\u0000" - + "\u0000\u009c\u0220\u0001\u0000\u0000\u0000\u009e\u0225\u0001\u0000\u0000" - + "\u0000\u00a0\u022b\u0001\u0000\u0000\u0000\u00a2\u0256\u0001\u0000\u0000" - + "\u0000\u00a4\u0258\u0001\u0000\u0000\u0000\u00a6\u025c\u0001\u0000\u0000" - + "\u0000\u00a8\u026b\u0001\u0000\u0000\u0000\u00aa\u026f\u0001\u0000\u0000" - + "\u0000\u00ac\u00ae\u0007\u0000\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000" - + "\u0000\u00ae\u00af\u0001\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000" - + "\u0000\u00af\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000" - + "\u0000\u00b1\u00b2\u0006\u0000\u0000\u0000\u00b2\u0003\u0001\u0000\u0000" - + "\u0000\u00b3\u00b4\u0005/\u0000\u0000\u00b4\u00b5\u0005/\u0000\u0000\u00b5" - + "\u00b9\u0001\u0000\u0000\u0000\u00b6\u00b8\t\u0000\u0000\u0000\u00b7\u00b6" - + "\u0001\u0000\u0000\u0000\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00ba" - + "\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000\u0000\u0000\u00ba\u00bc" - + "\u0001\u0000\u0000\u0000\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00c9" - + "\u0007\u0001\u0000\u0000\u00bd\u00be\u0005/\u0000\u0000\u00be\u00bf\u0005" - + "*\u0000\u0000\u00bf\u00c3\u0001\u0000\u0000\u0000\u00c0\u00c2\t\u0000" - + "\u0000\u0000\u00c1\u00c0\u0001\u0000\u0000\u0000\u00c2\u00c5\u0001\u0000" - + "\u0000\u0000\u00c3\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001\u0000" - + "\u0000\u0000\u00c4\u00c6\u0001\u0000\u0000\u0000\u00c5\u00c3\u0001\u0000" - + "\u0000\u0000\u00c6\u00c7\u0005*\u0000\u0000\u00c7\u00c9\u0005/\u0000\u0000" - + "\u00c8\u00b3\u0001\u0000\u0000\u0000\u00c8\u00bd\u0001\u0000\u0000\u0000" - + 
"\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0006\u0001\u0000\u0000" - + "\u00cb\u0005\u0001\u0000\u0000\u0000\u00cc\u00cd\u0005{\u0000\u0000\u00cd" - + "\u0007\u0001\u0000\u0000\u0000\u00ce\u00cf\u0005}\u0000\u0000\u00cf\t" - + "\u0001\u0000\u0000\u0000\u00d0\u00d1\u0005[\u0000\u0000\u00d1\u000b\u0001" - + "\u0000\u0000\u0000\u00d2\u00d3\u0005]\u0000\u0000\u00d3\r\u0001\u0000" - + "\u0000\u0000\u00d4\u00d5\u0005(\u0000\u0000\u00d5\u000f\u0001\u0000\u0000" - + "\u0000\u00d6\u00d7\u0005)\u0000\u0000\u00d7\u0011\u0001\u0000\u0000\u0000" - + "\u00d8\u00d9\u0005.\u0000\u0000\u00d9\u00da\u0001\u0000\u0000\u0000\u00da" - + "\u00db\u0006\b\u0001\u0000\u00db\u0013\u0001\u0000\u0000\u0000\u00dc\u00dd" - + "\u0005?\u0000\u0000\u00dd\u00de\u0005.\u0000\u0000\u00de\u00df\u0001\u0000" - + "\u0000\u0000\u00df\u00e0\u0006\t\u0001\u0000\u00e0\u0015\u0001\u0000\u0000" - + "\u0000\u00e1\u00e2\u0005,\u0000\u0000\u00e2\u0017\u0001\u0000\u0000\u0000" - + "\u00e3\u00e4\u0005;\u0000\u0000\u00e4\u0019\u0001\u0000\u0000\u0000\u00e5" - + "\u00e6\u0005i\u0000\u0000\u00e6\u00e7\u0005f\u0000\u0000\u00e7\u001b\u0001" - + "\u0000\u0000\u0000\u00e8\u00e9\u0005i\u0000\u0000\u00e9\u00ea\u0005n\u0000" - + "\u0000\u00ea\u001d\u0001\u0000\u0000\u0000\u00eb\u00ec\u0005e\u0000\u0000" - + "\u00ec\u00ed\u0005l\u0000\u0000\u00ed\u00ee\u0005s\u0000\u0000\u00ee\u00ef" - + "\u0005e\u0000\u0000\u00ef\u001f\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005" - + "w\u0000\u0000\u00f1\u00f2\u0005h\u0000\u0000\u00f2\u00f3\u0005i\u0000" - + "\u0000\u00f3\u00f4\u0005l\u0000\u0000\u00f4\u00f5\u0005e\u0000\u0000\u00f5" - + "!\u0001\u0000\u0000\u0000\u00f6\u00f7\u0005d\u0000\u0000\u00f7\u00f8\u0005" - + "o\u0000\u0000\u00f8#\u0001\u0000\u0000\u0000\u00f9\u00fa\u0005f\u0000" - + "\u0000\u00fa\u00fb\u0005o\u0000\u0000\u00fb\u00fc\u0005r\u0000\u0000\u00fc" - + "%\u0001\u0000\u0000\u0000\u00fd\u00fe\u0005c\u0000\u0000\u00fe\u00ff\u0005" - + "o\u0000\u0000\u00ff\u0100\u0005n\u0000\u0000\u0100\u0101\u0005t\u0000" - + "\u0000\u0101\u0102\u0005i\u0000\u0000\u0102\u0103\u0005n\u0000\u0000\u0103" - + "\u0104\u0005u\u0000\u0000\u0104\u0105\u0005e\u0000\u0000\u0105\'\u0001" - + "\u0000\u0000\u0000\u0106\u0107\u0005b\u0000\u0000\u0107\u0108\u0005r\u0000" - + "\u0000\u0108\u0109\u0005e\u0000\u0000\u0109\u010a\u0005a\u0000\u0000\u010a" - + "\u010b\u0005k\u0000\u0000\u010b)\u0001\u0000\u0000\u0000\u010c\u010d\u0005" - + "r\u0000\u0000\u010d\u010e\u0005e\u0000\u0000\u010e\u010f\u0005t\u0000" - + "\u0000\u010f\u0110\u0005u\u0000\u0000\u0110\u0111\u0005r\u0000\u0000\u0111" - + "\u0112\u0005n\u0000\u0000\u0112+\u0001\u0000\u0000\u0000\u0113\u0114\u0005" - + "n\u0000\u0000\u0114\u0115\u0005e\u0000\u0000\u0115\u0116\u0005w\u0000" - + "\u0000\u0116-\u0001\u0000\u0000\u0000\u0117\u0118\u0005t\u0000\u0000\u0118" - + "\u0119\u0005r\u0000\u0000\u0119\u011a\u0005y\u0000\u0000\u011a/\u0001" - + "\u0000\u0000\u0000\u011b\u011c\u0005c\u0000\u0000\u011c\u011d\u0005a\u0000" - + "\u0000\u011d\u011e\u0005t\u0000\u0000\u011e\u011f\u0005c\u0000\u0000\u011f" - + "\u0120\u0005h\u0000\u0000\u01201\u0001\u0000\u0000\u0000\u0121\u0122\u0005" - + "t\u0000\u0000\u0122\u0123\u0005h\u0000\u0000\u0123\u0124\u0005r\u0000" - + "\u0000\u0124\u0125\u0005o\u0000\u0000\u0125\u0126\u0005w\u0000\u0000\u0126" - + "3\u0001\u0000\u0000\u0000\u0127\u0128\u0005t\u0000\u0000\u0128\u0129\u0005" - + "h\u0000\u0000\u0129\u012a\u0005i\u0000\u0000\u012a\u012b\u0005s\u0000" - + "\u0000\u012b5\u0001\u0000\u0000\u0000\u012c\u012d\u0005i\u0000\u0000\u012d" - + 
"\u012e\u0005n\u0000\u0000\u012e\u012f\u0005s\u0000\u0000\u012f\u0130\u0005" - + "t\u0000\u0000\u0130\u0131\u0005a\u0000\u0000\u0131\u0132\u0005n\u0000" - + "\u0000\u0132\u0133\u0005c\u0000\u0000\u0133\u0134\u0005e\u0000\u0000\u0134" - + "\u0135\u0005o\u0000\u0000\u0135\u0136\u0005f\u0000\u0000\u01367\u0001" - + "\u0000\u0000\u0000\u0137\u0138\u0005!\u0000\u0000\u01389\u0001\u0000\u0000" - + "\u0000\u0139\u013a\u0005~\u0000\u0000\u013a;\u0001\u0000\u0000\u0000\u013b" - + "\u013c\u0005*\u0000\u0000\u013c=\u0001\u0000\u0000\u0000\u013d\u013e\u0005" - + "/\u0000\u0000\u013e\u013f\u0004\u001e\u0000\u0000\u013f?\u0001\u0000\u0000" - + "\u0000\u0140\u0141\u0005%\u0000\u0000\u0141A\u0001\u0000\u0000\u0000\u0142" - + "\u0143\u0005+\u0000\u0000\u0143C\u0001\u0000\u0000\u0000\u0144\u0145\u0005" - + "-\u0000\u0000\u0145E\u0001\u0000\u0000\u0000\u0146\u0147\u0005<\u0000" - + "\u0000\u0147\u0148\u0005<\u0000\u0000\u0148G\u0001\u0000\u0000\u0000\u0149" - + "\u014a\u0005>\u0000\u0000\u014a\u014b\u0005>\u0000\u0000\u014bI\u0001" - + "\u0000\u0000\u0000\u014c\u014d\u0005>\u0000\u0000\u014d\u014e\u0005>\u0000" - + "\u0000\u014e\u014f\u0005>\u0000\u0000\u014fK\u0001\u0000\u0000\u0000\u0150" - + "\u0151\u0005<\u0000\u0000\u0151M\u0001\u0000\u0000\u0000\u0152\u0153\u0005" - + "<\u0000\u0000\u0153\u0154\u0005=\u0000\u0000\u0154O\u0001\u0000\u0000" - + "\u0000\u0155\u0156\u0005>\u0000\u0000\u0156Q\u0001\u0000\u0000\u0000\u0157" - + "\u0158\u0005>\u0000\u0000\u0158\u0159\u0005=\u0000\u0000\u0159S\u0001" - + "\u0000\u0000\u0000\u015a\u015b\u0005=\u0000\u0000\u015b\u015c\u0005=\u0000" - + "\u0000\u015cU\u0001\u0000\u0000\u0000\u015d\u015e\u0005=\u0000\u0000\u015e" - + "\u015f\u0005=\u0000\u0000\u015f\u0160\u0005=\u0000\u0000\u0160W\u0001" - + "\u0000\u0000\u0000\u0161\u0162\u0005!\u0000\u0000\u0162\u0163\u0005=\u0000" - + "\u0000\u0163Y\u0001\u0000\u0000\u0000\u0164\u0165\u0005!\u0000\u0000\u0165" - + "\u0166\u0005=\u0000\u0000\u0166\u0167\u0005=\u0000\u0000\u0167[\u0001" - + "\u0000\u0000\u0000\u0168\u0169\u0005&\u0000\u0000\u0169]\u0001\u0000\u0000" - + "\u0000\u016a\u016b\u0005^\u0000\u0000\u016b_\u0001\u0000\u0000\u0000\u016c" - + "\u016d\u0005|\u0000\u0000\u016da\u0001\u0000\u0000\u0000\u016e\u016f\u0005" - + "&\u0000\u0000\u016f\u0170\u0005&\u0000\u0000\u0170c\u0001\u0000\u0000" - + "\u0000\u0171\u0172\u0005|\u0000\u0000\u0172\u0173\u0005|\u0000\u0000\u0173" - + "e\u0001\u0000\u0000\u0000\u0174\u0175\u0005?\u0000\u0000\u0175g\u0001" - + "\u0000\u0000\u0000\u0176\u0177\u0005:\u0000\u0000\u0177i\u0001\u0000\u0000" - + "\u0000\u0178\u0179\u0005?\u0000\u0000\u0179\u017a\u0005:\u0000\u0000\u017a" - + "k\u0001\u0000\u0000\u0000\u017b\u017c\u0005:\u0000\u0000\u017c\u017d\u0005" - + ":\u0000\u0000\u017dm\u0001\u0000\u0000\u0000\u017e\u017f\u0005-\u0000" - + "\u0000\u017f\u0180\u0005>\u0000\u0000\u0180o\u0001\u0000\u0000\u0000\u0181" - + "\u0182\u0005=\u0000\u0000\u0182\u0183\u0005~\u0000\u0000\u0183q\u0001" - + "\u0000\u0000\u0000\u0184\u0185\u0005=\u0000\u0000\u0185\u0186\u0005=\u0000" - + "\u0000\u0186\u0187\u0005~\u0000\u0000\u0187s\u0001\u0000\u0000\u0000\u0188" - + "\u0189\u0005+\u0000\u0000\u0189\u018a\u0005+\u0000\u0000\u018au\u0001" - + "\u0000\u0000\u0000\u018b\u018c\u0005-\u0000\u0000\u018c\u018d\u0005-\u0000" - + "\u0000\u018dw\u0001\u0000\u0000\u0000\u018e\u018f\u0005=\u0000\u0000\u018f" - + "y\u0001\u0000\u0000\u0000\u0190\u0191\u0005+\u0000\u0000\u0191\u0192\u0005" - + "=\u0000\u0000\u0192{\u0001\u0000\u0000\u0000\u0193\u0194\u0005-\u0000" - + 
"\u0000\u0194\u0195\u0005=\u0000\u0000\u0195}\u0001\u0000\u0000\u0000\u0196" - + "\u0197\u0005*\u0000\u0000\u0197\u0198\u0005=\u0000\u0000\u0198\u007f\u0001" - + "\u0000\u0000\u0000\u0199\u019a\u0005/\u0000\u0000\u019a\u019b\u0005=\u0000" - + "\u0000\u019b\u0081\u0001\u0000\u0000\u0000\u019c\u019d\u0005%\u0000\u0000" - + "\u019d\u019e\u0005=\u0000\u0000\u019e\u0083\u0001\u0000\u0000\u0000\u019f" - + "\u01a0\u0005&\u0000\u0000\u01a0\u01a1\u0005=\u0000\u0000\u01a1\u0085\u0001" - + "\u0000\u0000\u0000\u01a2\u01a3\u0005^\u0000\u0000\u01a3\u01a4\u0005=\u0000" - + "\u0000\u01a4\u0087\u0001\u0000\u0000\u0000\u01a5\u01a6\u0005|\u0000\u0000" - + "\u01a6\u01a7\u0005=\u0000\u0000\u01a7\u0089\u0001\u0000\u0000\u0000\u01a8" - + "\u01a9\u0005<\u0000\u0000\u01a9\u01aa\u0005<\u0000\u0000\u01aa\u01ab\u0005" - + "=\u0000\u0000\u01ab\u008b\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005>\u0000" - + "\u0000\u01ad\u01ae\u0005>\u0000\u0000\u01ae\u01af\u0005=\u0000\u0000\u01af" - + "\u008d\u0001\u0000\u0000\u0000\u01b0\u01b1\u0005>\u0000\u0000\u01b1\u01b2" - + "\u0005>\u0000\u0000\u01b2\u01b3\u0005>\u0000\u0000\u01b3\u01b4\u0005=" - + "\u0000\u0000\u01b4\u008f\u0001\u0000\u0000\u0000\u01b5\u01b7\u00050\u0000" - + "\u0000\u01b6\u01b8\u0007\u0002\u0000\u0000\u01b7\u01b6\u0001\u0000\u0000" - + "\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9\u01b7\u0001\u0000\u0000" - + "\u0000\u01b9\u01ba\u0001\u0000\u0000\u0000\u01ba\u01bc\u0001\u0000\u0000" - + "\u0000\u01bb\u01bd\u0007\u0003\u0000\u0000\u01bc\u01bb\u0001\u0000\u0000" - + "\u0000\u01bc\u01bd\u0001\u0000\u0000\u0000\u01bd\u0091\u0001\u0000\u0000" - + "\u0000\u01be\u01bf\u00050\u0000\u0000\u01bf\u01c1\u0007\u0004\u0000\u0000" - + "\u01c0\u01c2\u0007\u0005\u0000\u0000\u01c1\u01c0\u0001\u0000\u0000\u0000" - + "\u01c2\u01c3\u0001\u0000\u0000\u0000\u01c3\u01c1\u0001\u0000\u0000\u0000" - + "\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c6\u0001\u0000\u0000\u0000" - + "\u01c5\u01c7\u0007\u0003\u0000\u0000\u01c6\u01c5\u0001\u0000\u0000\u0000" - + "\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7\u0093\u0001\u0000\u0000\u0000" - + "\u01c8\u01d1\u00050\u0000\u0000\u01c9\u01cd\u0007\u0006\u0000\u0000\u01ca" - + "\u01cc\u0007\u0007\u0000\u0000\u01cb\u01ca\u0001\u0000\u0000\u0000\u01cc" - + "\u01cf\u0001\u0000\u0000\u0000\u01cd\u01cb\u0001\u0000\u0000\u0000\u01cd" - + "\u01ce\u0001\u0000\u0000\u0000\u01ce\u01d1\u0001\u0000\u0000\u0000\u01cf" - + "\u01cd\u0001\u0000\u0000\u0000\u01d0\u01c8\u0001\u0000\u0000\u0000\u01d0" - + "\u01c9\u0001\u0000\u0000\u0000\u01d1\u01d3\u0001\u0000\u0000\u0000\u01d2" - + "\u01d4\u0007\b\u0000\u0000\u01d3\u01d2\u0001\u0000\u0000\u0000\u01d3\u01d4" - + "\u0001\u0000\u0000\u0000\u01d4\u0095\u0001\u0000\u0000\u0000\u01d5\u01de" - + "\u00050\u0000\u0000\u01d6\u01da\u0007\u0006\u0000\u0000\u01d7\u01d9\u0007" - + "\u0007\u0000\u0000\u01d8\u01d7\u0001\u0000\u0000\u0000\u01d9\u01dc\u0001" - + "\u0000\u0000\u0000\u01da\u01d8\u0001\u0000\u0000\u0000\u01da\u01db\u0001" - + "\u0000\u0000\u0000\u01db\u01de\u0001\u0000\u0000\u0000\u01dc\u01da\u0001" - + "\u0000\u0000\u0000\u01dd\u01d5\u0001\u0000\u0000\u0000\u01dd\u01d6\u0001" - + "\u0000\u0000\u0000\u01de\u01e5\u0001\u0000\u0000\u0000\u01df\u01e1\u0003" - + "\u0012\b\u0000\u01e0\u01e2\u0007\u0007\u0000\u0000\u01e1\u01e0\u0001\u0000" - + "\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e1\u0001\u0000" - + "\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000\u0000\u01e4\u01e6\u0001\u0000" - + "\u0000\u0000\u01e5\u01df\u0001\u0000\u0000\u0000\u01e5\u01e6\u0001\u0000" - + 
"\u0000\u0000\u01e6\u01f0\u0001\u0000\u0000\u0000\u01e7\u01e9\u0007\t\u0000" - + "\u0000\u01e8\u01ea\u0007\n\u0000\u0000\u01e9\u01e8\u0001\u0000\u0000\u0000" - + "\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01ec\u0001\u0000\u0000\u0000" - + "\u01eb\u01ed\u0007\u0007\u0000\u0000\u01ec\u01eb\u0001\u0000\u0000\u0000" - + "\u01ed\u01ee\u0001\u0000\u0000\u0000\u01ee\u01ec\u0001\u0000\u0000\u0000" - + "\u01ee\u01ef\u0001\u0000\u0000\u0000\u01ef\u01f1\u0001\u0000\u0000\u0000" - + "\u01f0\u01e7\u0001\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000" - + "\u01f1\u01f3\u0001\u0000\u0000\u0000\u01f2\u01f4\u0007\u000b\u0000\u0000" - + "\u01f3\u01f2\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000" - + "\u01f4\u0097\u0001\u0000\u0000\u0000\u01f5\u01fd\u0005\"\u0000\u0000\u01f6" - + "\u01f7\u0005\\\u0000\u0000\u01f7\u01fc\u0005\"\u0000\u0000\u01f8\u01f9" - + "\u0005\\\u0000\u0000\u01f9\u01fc\u0005\\\u0000\u0000\u01fa\u01fc\b\f\u0000" - + "\u0000\u01fb\u01f6\u0001\u0000\u0000\u0000\u01fb\u01f8\u0001\u0000\u0000" - + "\u0000\u01fb\u01fa\u0001\u0000\u0000\u0000\u01fc\u01ff\u0001\u0000\u0000" - + "\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fd\u01fb\u0001\u0000\u0000" - + "\u0000\u01fe\u0200\u0001\u0000\u0000\u0000\u01ff\u01fd\u0001\u0000\u0000" - + "\u0000\u0200\u020e\u0005\"\u0000\u0000\u0201\u0209\u0005\'\u0000\u0000" - + "\u0202\u0203\u0005\\\u0000\u0000\u0203\u0208\u0005\'\u0000\u0000\u0204" - + "\u0205\u0005\\\u0000\u0000\u0205\u0208\u0005\\\u0000\u0000\u0206\u0208" - + "\b\r\u0000\u0000\u0207\u0202\u0001\u0000\u0000\u0000\u0207\u0204\u0001" - + "\u0000\u0000\u0000\u0207\u0206\u0001\u0000\u0000\u0000\u0208\u020b\u0001" - + "\u0000\u0000\u0000\u0209\u020a\u0001\u0000\u0000\u0000\u0209\u0207\u0001" - + "\u0000\u0000\u0000\u020a\u020c\u0001\u0000\u0000\u0000\u020b\u0209\u0001" - + "\u0000\u0000\u0000\u020c\u020e\u0005\'\u0000\u0000\u020d\u01f5\u0001\u0000" - + "\u0000\u0000\u020d\u0201\u0001\u0000\u0000\u0000\u020e\u0099\u0001\u0000" - + "\u0000\u0000\u020f\u0213\u0005/\u0000\u0000\u0210\u0211\u0005\\\u0000" - + "\u0000\u0211\u0214\b\u000e\u0000\u0000\u0212\u0214\b\u000f\u0000\u0000" - + "\u0213\u0210\u0001\u0000\u0000\u0000\u0213\u0212\u0001\u0000\u0000\u0000" - + "\u0214\u0215\u0001\u0000\u0000\u0000\u0215\u0216\u0001\u0000\u0000\u0000" - + "\u0215\u0213\u0001\u0000\u0000\u0000\u0216\u0217\u0001\u0000\u0000\u0000" - + "\u0217\u021b\u0005/\u0000\u0000\u0218\u021a\u0007\u0010\u0000\u0000\u0219" - + "\u0218\u0001\u0000\u0000\u0000\u021a\u021d\u0001\u0000\u0000\u0000\u021b" - + "\u0219\u0001\u0000\u0000\u0000\u021b\u021c\u0001\u0000\u0000\u0000\u021c" - + "\u021e\u0001\u0000\u0000\u0000\u021d\u021b\u0001\u0000\u0000\u0000\u021e" - + "\u021f\u0004L\u0001\u0000\u021f\u009b\u0001\u0000\u0000\u0000\u0220\u0221" - + "\u0005t\u0000\u0000\u0221\u0222\u0005r\u0000\u0000\u0222\u0223\u0005u" - + "\u0000\u0000\u0223\u0224\u0005e\u0000\u0000\u0224\u009d\u0001\u0000\u0000" - + "\u0000\u0225\u0226\u0005f\u0000\u0000\u0226\u0227\u0005a\u0000\u0000\u0227" - + "\u0228\u0005l\u0000\u0000\u0228\u0229\u0005s\u0000\u0000\u0229\u022a\u0005" - + "e\u0000\u0000\u022a\u009f\u0001\u0000\u0000\u0000\u022b\u022c\u0005n\u0000" - + "\u0000\u022c\u022d\u0005u\u0000\u0000\u022d\u022e\u0005l\u0000\u0000\u022e" - + "\u022f\u0005l\u0000\u0000\u022f\u00a1\u0001\u0000\u0000\u0000\u0230\u0231" - + "\u0005b\u0000\u0000\u0231\u0232\u0005o\u0000\u0000\u0232\u0233\u0005o" - + "\u0000\u0000\u0233\u0234\u0005l\u0000\u0000\u0234\u0235\u0005e\u0000\u0000" - + 
"\u0235\u0236\u0005a\u0000\u0000\u0236\u0257\u0005n\u0000\u0000\u0237\u0238" - + "\u0005b\u0000\u0000\u0238\u0239\u0005y\u0000\u0000\u0239\u023a\u0005t" - + "\u0000\u0000\u023a\u0257\u0005e\u0000\u0000\u023b\u023c\u0005s\u0000\u0000" - + "\u023c\u023d\u0005h\u0000\u0000\u023d\u023e\u0005o\u0000\u0000\u023e\u023f" - + "\u0005r\u0000\u0000\u023f\u0257\u0005t\u0000\u0000\u0240\u0241\u0005c" - + "\u0000\u0000\u0241\u0242\u0005h\u0000\u0000\u0242\u0243\u0005a\u0000\u0000" - + "\u0243\u0257\u0005r\u0000\u0000\u0244\u0245\u0005i\u0000\u0000\u0245\u0246" - + "\u0005n\u0000\u0000\u0246\u0257\u0005t\u0000\u0000\u0247\u0248\u0005l" - + "\u0000\u0000\u0248\u0249\u0005o\u0000\u0000\u0249\u024a\u0005n\u0000\u0000" - + "\u024a\u0257\u0005g\u0000\u0000\u024b\u024c\u0005f\u0000\u0000\u024c\u024d" - + "\u0005l\u0000\u0000\u024d\u024e\u0005o\u0000\u0000\u024e\u024f\u0005a" - + "\u0000\u0000\u024f\u0257\u0005t\u0000\u0000\u0250\u0251\u0005d\u0000\u0000" - + "\u0251\u0252\u0005o\u0000\u0000\u0252\u0253\u0005u\u0000\u0000\u0253\u0254" - + "\u0005b\u0000\u0000\u0254\u0255\u0005l\u0000\u0000\u0255\u0257\u0005e" - + "\u0000\u0000\u0256\u0230\u0001\u0000\u0000\u0000\u0256\u0237\u0001\u0000" - + "\u0000\u0000\u0256\u023b\u0001\u0000\u0000\u0000\u0256\u0240\u0001\u0000" - + "\u0000\u0000\u0256\u0244\u0001\u0000\u0000\u0000\u0256\u0247\u0001\u0000" - + "\u0000\u0000\u0256\u024b\u0001\u0000\u0000\u0000\u0256\u0250\u0001\u0000" - + "\u0000\u0000\u0257\u00a3\u0001\u0000\u0000\u0000\u0258\u0259\u0005d\u0000" - + "\u0000\u0259\u025a\u0005e\u0000\u0000\u025a\u025b\u0005f\u0000\u0000\u025b" - + "\u00a5\u0001\u0000\u0000\u0000\u025c\u0260\u0007\u0011\u0000\u0000\u025d" - + "\u025f\u0007\u0012\u0000\u0000\u025e\u025d\u0001\u0000\u0000\u0000\u025f" - + "\u0262\u0001\u0000\u0000\u0000\u0260\u025e\u0001\u0000\u0000\u0000\u0260" - + "\u0261\u0001\u0000\u0000\u0000\u0261\u00a7\u0001\u0000\u0000\u0000\u0262" - + "\u0260\u0001\u0000\u0000\u0000\u0263\u026c\u00050\u0000\u0000\u0264\u0268" - + "\u0007\u0006\u0000\u0000\u0265\u0267\u0007\u0007\u0000\u0000\u0266\u0265" - + "\u0001\u0000\u0000\u0000\u0267\u026a\u0001\u0000\u0000\u0000\u0268\u0266" - + "\u0001\u0000\u0000\u0000\u0268\u0269\u0001\u0000\u0000\u0000\u0269\u026c" - + "\u0001\u0000\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026b\u0263" - + "\u0001\u0000\u0000\u0000\u026b\u0264\u0001\u0000\u0000\u0000\u026c\u026d" - + "\u0001\u0000\u0000\u0000\u026d\u026e\u0006S\u0002\u0000\u026e\u00a9\u0001" - + "\u0000\u0000\u0000\u026f\u0273\u0007\u0011\u0000\u0000\u0270\u0272\u0007" - + "\u0012\u0000\u0000\u0271\u0270\u0001\u0000\u0000\u0000\u0272\u0275\u0001" - + "\u0000\u0000\u0000\u0273\u0271\u0001\u0000\u0000\u0000\u0273\u0274\u0001" - + "\u0000\u0000\u0000\u0274\u0276\u0001\u0000\u0000\u0000\u0275\u0273\u0001" - + "\u0000\u0000\u0000\u0276\u0277\u0006T\u0002\u0000\u0277\u00ab\u0001\u0000" - + "\u0000\u0000\"\u0000\u0001\u00af\u00b9\u00c3\u00c8\u01b9\u01bc\u01c3\u01c6" - + "\u01cd\u01d0\u01d3\u01da\u01dd\u01e3\u01e5\u01e9\u01ee\u01f0\u01f3\u01fb" - + "\u01fd\u0207\u0209\u020d\u0213\u0215\u021b\u0256\u0260\u0268\u026b\u0273" - + "\u0003\u0006\u0000\u0000\u0002\u0001\u0000\u0002\u0000\u0000"; + + "P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0003" + + "P\u0256\bP\u0001Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0005R\u025e\bR\n" + + "R\fR\u0261\tR\u0001S\u0001S\u0001S\u0005S\u0266\bS\nS\fS\u0269\tS\u0003" + + "S\u026b\bS\u0001S\u0001S\u0001T\u0001T\u0005T\u0271\bT\nT\fT\u0274\tT" + + "\u0001T\u0001T\u0004\u00c2\u01fc\u0208\u0214\u0000U\u0002\u0001\u0004" + + 
"\u0002\u0006\u0003\b\u0004\n\u0005\f\u0006\u000e\u0007\u0010\b\u0012\t" + + "\u0014\n\u0016\u000b\u0018\f\u001a\r\u001c\u000e\u001e\u000f \u0010\"" + + "\u0011$\u0012&\u0013(\u0014*\u0015,\u0016.\u00170\u00182\u00194\u001a" + + "6\u001b8\u001c:\u001d<\u001e>\u001f@ B!D\"F#H$J%L&N\'P(R)T*V+X,Z-\\.^" + + "/`0b1d2f3h4j5l6n7p8r9t:v;x<z=|>~?\u0080@\u0082A\u0084B\u0086C\u0088D\u008a" + + "E\u008cF\u008eG\u0090H\u0092I\u0094J\u0096K\u0098L\u009aM\u009cN\u009e" + + "O\u00a0P\u00a2Q\u00a4R\u00a6S\u00a8T\u00aaU\u0002\u0000\u0001\u0013\u0003" + + "\u0000\t\n\r\r \u0002\u0000\n\n\r\r\u0001\u000007\u0002\u0000LLll\u0002" + + "\u0000XXxx\u0003\u000009AFaf\u0001\u000019\u0001\u000009\u0006\u0000D" + + "DFFLLddffll\u0002\u0000EEee\u0002\u0000++--\u0004\u0000DDFFddff\u0002" + + "\u0000\"\"\\\\\u0002\u0000\'\'\\\\\u0001\u0000\n\n\u0002\u0000\n\n//\u0007" + + "\u0000UUcciilmssuuxx\u0003\u0000AZ__az\u0004\u000009AZ__az\u029d\u0000" + + "\u0002\u0001\u0000\u0000\u0000\u0000\u0004\u0001\u0000\u0000\u0000\u0000" + + "\u0006\u0001\u0000\u0000\u0000\u0000\b\u0001\u0000\u0000\u0000\u0000\n" + + "\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000\u0000\u000e\u0001" + + "\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001" + + "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001" + + "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001" + + "\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001" + + "\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\"\u0001\u0000" + + "\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000\u0000\u0000" + + "\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000\u0000," + + "\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000\u0001\u0000" + + "\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000\u0000\u0000" + + "\u00006\u0001\u0000\u0000\u0000\u00008\u0001\u0000\u0000\u0000\u0000:" + + "\u0001\u0000\u0000\u0000\u0000<\u0001\u0000\u0000\u0000\u0000>\u0001\u0000" + + "\u0000\u0000\u0000@\u0001\u0000\u0000\u0000\u0000B\u0001\u0000\u0000\u0000" + + "\u0000D\u0001\u0000\u0000\u0000\u0000F\u0001\u0000\u0000\u0000\u0000H" + + "\u0001\u0000\u0000\u0000\u0000J\u0001\u0000\u0000\u0000\u0000L\u0001\u0000" + + "\u0000\u0000\u0000N\u0001\u0000\u0000\u0000\u0000P\u0001\u0000\u0000\u0000" + + "\u0000R\u0001\u0000\u0000\u0000\u0000T\u0001\u0000\u0000\u0000\u0000V" + + "\u0001\u0000\u0000\u0000\u0000X\u0001\u0000\u0000\u0000\u0000Z\u0001\u0000" + + "\u0000\u0000\u0000\\\u0001\u0000\u0000\u0000\u0000^\u0001\u0000\u0000" + + "\u0000\u0000`\u0001\u0000\u0000\u0000\u0000b\u0001\u0000\u0000\u0000\u0000" + + "d\u0001\u0000\u0000\u0000\u0000f\u0001\u0000\u0000\u0000\u0000h\u0001" + + "\u0000\u0000\u0000\u0000j\u0001\u0000\u0000\u0000\u0000l\u0001\u0000\u0000" + + "\u0000\u0000n\u0001\u0000\u0000\u0000\u0000p\u0001\u0000\u0000\u0000\u0000" + + "r\u0001\u0000\u0000\u0000\u0000t\u0001\u0000\u0000\u0000\u0000v\u0001" + + "\u0000\u0000\u0000\u0000x\u0001\u0000\u0000\u0000\u0000z\u0001\u0000\u0000" + + "\u0000\u0000|\u0001\u0000\u0000\u0000\u0000~\u0001\u0000\u0000\u0000\u0000" + + "\u0080\u0001\u0000\u0000\u0000\u0000\u0082\u0001\u0000\u0000\u0000\u0000" + + "\u0084\u0001\u0000\u0000\u0000\u0000\u0086\u0001\u0000\u0000\u0000\u0000" + + "\u0088\u0001\u0000\u0000\u0000\u0000\u008a\u0001\u0000\u0000\u0000\u0000" + + "\u008c\u0001\u0000\u0000\u0000\u0000\u008e\u0001\u0000\u0000\u0000\u0000" + + "\u0090\u0001\u0000\u0000\u0000\u0000\u0092\u0001\u0000\u0000\u0000\u0000" + + 
"\u0094\u0001\u0000\u0000\u0000\u0000\u0096\u0001\u0000\u0000\u0000\u0000" + + "\u0098\u0001\u0000\u0000\u0000\u0000\u009a\u0001\u0000\u0000\u0000\u0000" + + "\u009c\u0001\u0000\u0000\u0000\u0000\u009e\u0001\u0000\u0000\u0000\u0000" + + "\u00a0\u0001\u0000\u0000\u0000\u0000\u00a2\u0001\u0000\u0000\u0000\u0000" + + "\u00a4\u0001\u0000\u0000\u0000\u0000\u00a6\u0001\u0000\u0000\u0000\u0001" + + "\u00a8\u0001\u0000\u0000\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0002" + + "\u00ad\u0001\u0000\u0000\u0000\u0004\u00c7\u0001\u0000\u0000\u0000\u0006" + + "\u00cb\u0001\u0000\u0000\u0000\b\u00cd\u0001\u0000\u0000\u0000\n\u00cf" + + "\u0001\u0000\u0000\u0000\f\u00d1\u0001\u0000\u0000\u0000\u000e\u00d3\u0001" + + "\u0000\u0000\u0000\u0010\u00d5\u0001\u0000\u0000\u0000\u0012\u00d7\u0001" + + "\u0000\u0000\u0000\u0014\u00db\u0001\u0000\u0000\u0000\u0016\u00e0\u0001" + + "\u0000\u0000\u0000\u0018\u00e2\u0001\u0000\u0000\u0000\u001a\u00e4\u0001" + + "\u0000\u0000\u0000\u001c\u00e7\u0001\u0000\u0000\u0000\u001e\u00ea\u0001" + + "\u0000\u0000\u0000 \u00ef\u0001\u0000\u0000\u0000\"\u00f5\u0001\u0000" + + "\u0000\u0000$\u00f8\u0001\u0000\u0000\u0000&\u00fc\u0001\u0000\u0000\u0000" + + "(\u0105\u0001\u0000\u0000\u0000*\u010b\u0001\u0000\u0000\u0000,\u0112" + + "\u0001\u0000\u0000\u0000.\u0116\u0001\u0000\u0000\u00000\u011a\u0001\u0000" + + "\u0000\u00002\u0120\u0001\u0000\u0000\u00004\u0126\u0001\u0000\u0000\u0000" + + "6\u012b\u0001\u0000\u0000\u00008\u0136\u0001\u0000\u0000\u0000:\u0138" + + "\u0001\u0000\u0000\u0000<\u013a\u0001\u0000\u0000\u0000>\u013c\u0001\u0000" + + "\u0000\u0000@\u013f\u0001\u0000\u0000\u0000B\u0141\u0001\u0000\u0000\u0000" + + "D\u0143\u0001\u0000\u0000\u0000F\u0145\u0001\u0000\u0000\u0000H\u0148" + + "\u0001\u0000\u0000\u0000J\u014b\u0001\u0000\u0000\u0000L\u014f\u0001\u0000" + + "\u0000\u0000N\u0151\u0001\u0000\u0000\u0000P\u0154\u0001\u0000\u0000\u0000" + + "R\u0156\u0001\u0000\u0000\u0000T\u0159\u0001\u0000\u0000\u0000V\u015c" + + "\u0001\u0000\u0000\u0000X\u0160\u0001\u0000\u0000\u0000Z\u0163\u0001\u0000" + + "\u0000\u0000\\\u0167\u0001\u0000\u0000\u0000^\u0169\u0001\u0000\u0000" + + "\u0000`\u016b\u0001\u0000\u0000\u0000b\u016d\u0001\u0000\u0000\u0000d" + + "\u0170\u0001\u0000\u0000\u0000f\u0173\u0001\u0000\u0000\u0000h\u0175\u0001" + + "\u0000\u0000\u0000j\u0177\u0001\u0000\u0000\u0000l\u017a\u0001\u0000\u0000" + + "\u0000n\u017d\u0001\u0000\u0000\u0000p\u0180\u0001\u0000\u0000\u0000r" + + "\u0183\u0001\u0000\u0000\u0000t\u0187\u0001\u0000\u0000\u0000v\u018a\u0001" + + "\u0000\u0000\u0000x\u018d\u0001\u0000\u0000\u0000z\u018f\u0001\u0000\u0000" + + "\u0000|\u0192\u0001\u0000\u0000\u0000~\u0195\u0001\u0000\u0000\u0000\u0080" + + "\u0198\u0001\u0000\u0000\u0000\u0082\u019b\u0001\u0000\u0000\u0000\u0084" + + "\u019e\u0001\u0000\u0000\u0000\u0086\u01a1\u0001\u0000\u0000\u0000\u0088" + + "\u01a4\u0001\u0000\u0000\u0000\u008a\u01a7\u0001\u0000\u0000\u0000\u008c" + + "\u01ab\u0001\u0000\u0000\u0000\u008e\u01af\u0001\u0000\u0000\u0000\u0090" + + "\u01b4\u0001\u0000\u0000\u0000\u0092\u01bd\u0001\u0000\u0000\u0000\u0094" + + "\u01cf\u0001\u0000\u0000\u0000\u0096\u01dc\u0001\u0000\u0000\u0000\u0098" + + "\u020c\u0001\u0000\u0000\u0000\u009a\u020e\u0001\u0000\u0000\u0000\u009c" + + "\u021f\u0001\u0000\u0000\u0000\u009e\u0224\u0001\u0000\u0000\u0000\u00a0" + + "\u022a\u0001\u0000\u0000\u0000\u00a2\u0255\u0001\u0000\u0000\u0000\u00a4" + + "\u0257\u0001\u0000\u0000\u0000\u00a6\u025b\u0001\u0000\u0000\u0000\u00a8" + + "\u026a\u0001\u0000\u0000\u0000\u00aa\u026e\u0001\u0000\u0000\u0000\u00ac" + 
+ "\u00ae\u0007\u0000\u0000\u0000\u00ad\u00ac\u0001\u0000\u0000\u0000\u00ae" + + "\u00af\u0001\u0000\u0000\u0000\u00af\u00ad\u0001\u0000\u0000\u0000\u00af" + + "\u00b0\u0001\u0000\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1" + + "\u00b2\u0006\u0000\u0000\u0000\u00b2\u0003\u0001\u0000\u0000\u0000\u00b3" + + "\u00b4\u0005/\u0000\u0000\u00b4\u00b5\u0005/\u0000\u0000\u00b5\u00b9\u0001" + + "\u0000\u0000\u0000\u00b6\u00b8\b\u0001\u0000\u0000\u00b7\u00b6\u0001\u0000" + + "\u0000\u0000\u00b8\u00bb\u0001\u0000\u0000\u0000\u00b9\u00b7\u0001\u0000" + + "\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000\u0000\u00ba\u00c8\u0001\u0000" + + "\u0000\u0000\u00bb\u00b9\u0001\u0000\u0000\u0000\u00bc\u00bd\u0005/\u0000" + + "\u0000\u00bd\u00be\u0005*\u0000\u0000\u00be\u00c2\u0001\u0000\u0000\u0000" + + "\u00bf\u00c1\t\u0000\u0000\u0000\u00c0\u00bf\u0001\u0000\u0000\u0000\u00c1" + + "\u00c4\u0001\u0000\u0000\u0000\u00c2\u00c3\u0001\u0000\u0000\u0000\u00c2" + + "\u00c0\u0001\u0000\u0000\u0000\u00c3\u00c5\u0001\u0000\u0000\u0000\u00c4" + + "\u00c2\u0001\u0000\u0000\u0000\u00c5\u00c6\u0005*\u0000\u0000\u00c6\u00c8" + + "\u0005/\u0000\u0000\u00c7\u00b3\u0001\u0000\u0000\u0000\u00c7\u00bc\u0001" + + "\u0000\u0000\u0000\u00c8\u00c9\u0001\u0000\u0000\u0000\u00c9\u00ca\u0006" + + "\u0001\u0000\u0000\u00ca\u0005\u0001\u0000\u0000\u0000\u00cb\u00cc\u0005" + + "{\u0000\u0000\u00cc\u0007\u0001\u0000\u0000\u0000\u00cd\u00ce\u0005}\u0000" + + "\u0000\u00ce\t\u0001\u0000\u0000\u0000\u00cf\u00d0\u0005[\u0000\u0000" + + "\u00d0\u000b\u0001\u0000\u0000\u0000\u00d1\u00d2\u0005]\u0000\u0000\u00d2" + + "\r\u0001\u0000\u0000\u0000\u00d3\u00d4\u0005(\u0000\u0000\u00d4\u000f" + + "\u0001\u0000\u0000\u0000\u00d5\u00d6\u0005)\u0000\u0000\u00d6\u0011\u0001" + + "\u0000\u0000\u0000\u00d7\u00d8\u0005.\u0000\u0000\u00d8\u00d9\u0001\u0000" + + "\u0000\u0000\u00d9\u00da\u0006\b\u0001\u0000\u00da\u0013\u0001\u0000\u0000" + + "\u0000\u00db\u00dc\u0005?\u0000\u0000\u00dc\u00dd\u0005.\u0000\u0000\u00dd" + + "\u00de\u0001\u0000\u0000\u0000\u00de\u00df\u0006\t\u0001\u0000\u00df\u0015" + + "\u0001\u0000\u0000\u0000\u00e0\u00e1\u0005,\u0000\u0000\u00e1\u0017\u0001" + + "\u0000\u0000\u0000\u00e2\u00e3\u0005;\u0000\u0000\u00e3\u0019\u0001\u0000" + + "\u0000\u0000\u00e4\u00e5\u0005i\u0000\u0000\u00e5\u00e6\u0005f\u0000\u0000" + + "\u00e6\u001b\u0001\u0000\u0000\u0000\u00e7\u00e8\u0005i\u0000\u0000\u00e8" + + "\u00e9\u0005n\u0000\u0000\u00e9\u001d\u0001\u0000\u0000\u0000\u00ea\u00eb" + + "\u0005e\u0000\u0000\u00eb\u00ec\u0005l\u0000\u0000\u00ec\u00ed\u0005s" + + "\u0000\u0000\u00ed\u00ee\u0005e\u0000\u0000\u00ee\u001f\u0001\u0000\u0000" + + "\u0000\u00ef\u00f0\u0005w\u0000\u0000\u00f0\u00f1\u0005h\u0000\u0000\u00f1" + + "\u00f2\u0005i\u0000\u0000\u00f2\u00f3\u0005l\u0000\u0000\u00f3\u00f4\u0005" + + "e\u0000\u0000\u00f4!\u0001\u0000\u0000\u0000\u00f5\u00f6\u0005d\u0000" + + "\u0000\u00f6\u00f7\u0005o\u0000\u0000\u00f7#\u0001\u0000\u0000\u0000\u00f8" + + "\u00f9\u0005f\u0000\u0000\u00f9\u00fa\u0005o\u0000\u0000\u00fa\u00fb\u0005" + + "r\u0000\u0000\u00fb%\u0001\u0000\u0000\u0000\u00fc\u00fd\u0005c\u0000" + + "\u0000\u00fd\u00fe\u0005o\u0000\u0000\u00fe\u00ff\u0005n\u0000\u0000\u00ff" + + "\u0100\u0005t\u0000\u0000\u0100\u0101\u0005i\u0000\u0000\u0101\u0102\u0005" + + "n\u0000\u0000\u0102\u0103\u0005u\u0000\u0000\u0103\u0104\u0005e\u0000" + + "\u0000\u0104\'\u0001\u0000\u0000\u0000\u0105\u0106\u0005b\u0000\u0000" + + "\u0106\u0107\u0005r\u0000\u0000\u0107\u0108\u0005e\u0000\u0000\u0108\u0109" + + 
"\u0005a\u0000\u0000\u0109\u010a\u0005k\u0000\u0000\u010a)\u0001\u0000" + + "\u0000\u0000\u010b\u010c\u0005r\u0000\u0000\u010c\u010d\u0005e\u0000\u0000" + + "\u010d\u010e\u0005t\u0000\u0000\u010e\u010f\u0005u\u0000\u0000\u010f\u0110" + + "\u0005r\u0000\u0000\u0110\u0111\u0005n\u0000\u0000\u0111+\u0001\u0000" + + "\u0000\u0000\u0112\u0113\u0005n\u0000\u0000\u0113\u0114\u0005e\u0000\u0000" + + "\u0114\u0115\u0005w\u0000\u0000\u0115-\u0001\u0000\u0000\u0000\u0116\u0117" + + "\u0005t\u0000\u0000\u0117\u0118\u0005r\u0000\u0000\u0118\u0119\u0005y" + + "\u0000\u0000\u0119/\u0001\u0000\u0000\u0000\u011a\u011b\u0005c\u0000\u0000" + + "\u011b\u011c\u0005a\u0000\u0000\u011c\u011d\u0005t\u0000\u0000\u011d\u011e" + + "\u0005c\u0000\u0000\u011e\u011f\u0005h\u0000\u0000\u011f1\u0001\u0000" + + "\u0000\u0000\u0120\u0121\u0005t\u0000\u0000\u0121\u0122\u0005h\u0000\u0000" + + "\u0122\u0123\u0005r\u0000\u0000\u0123\u0124\u0005o\u0000\u0000\u0124\u0125" + + "\u0005w\u0000\u0000\u01253\u0001\u0000\u0000\u0000\u0126\u0127\u0005t" + + "\u0000\u0000\u0127\u0128\u0005h\u0000\u0000\u0128\u0129\u0005i\u0000\u0000" + + "\u0129\u012a\u0005s\u0000\u0000\u012a5\u0001\u0000\u0000\u0000\u012b\u012c" + + "\u0005i\u0000\u0000\u012c\u012d\u0005n\u0000\u0000\u012d\u012e\u0005s" + + "\u0000\u0000\u012e\u012f\u0005t\u0000\u0000\u012f\u0130\u0005a\u0000\u0000" + + "\u0130\u0131\u0005n\u0000\u0000\u0131\u0132\u0005c\u0000\u0000\u0132\u0133" + + "\u0005e\u0000\u0000\u0133\u0134\u0005o\u0000\u0000\u0134\u0135\u0005f" + + "\u0000\u0000\u01357\u0001\u0000\u0000\u0000\u0136\u0137\u0005!\u0000\u0000" + + "\u01379\u0001\u0000\u0000\u0000\u0138\u0139\u0005~\u0000\u0000\u0139;" + + "\u0001\u0000\u0000\u0000\u013a\u013b\u0005*\u0000\u0000\u013b=\u0001\u0000" + + "\u0000\u0000\u013c\u013d\u0005/\u0000\u0000\u013d\u013e\u0004\u001e\u0000" + + "\u0000\u013e?\u0001\u0000\u0000\u0000\u013f\u0140\u0005%\u0000\u0000\u0140" + + "A\u0001\u0000\u0000\u0000\u0141\u0142\u0005+\u0000\u0000\u0142C\u0001" + + "\u0000\u0000\u0000\u0143\u0144\u0005-\u0000\u0000\u0144E\u0001\u0000\u0000" + + "\u0000\u0145\u0146\u0005<\u0000\u0000\u0146\u0147\u0005<\u0000\u0000\u0147" + + "G\u0001\u0000\u0000\u0000\u0148\u0149\u0005>\u0000\u0000\u0149\u014a\u0005" + + ">\u0000\u0000\u014aI\u0001\u0000\u0000\u0000\u014b\u014c\u0005>\u0000" + + "\u0000\u014c\u014d\u0005>\u0000\u0000\u014d\u014e\u0005>\u0000\u0000\u014e" + + "K\u0001\u0000\u0000\u0000\u014f\u0150\u0005<\u0000\u0000\u0150M\u0001" + + "\u0000\u0000\u0000\u0151\u0152\u0005<\u0000\u0000\u0152\u0153\u0005=\u0000" + + "\u0000\u0153O\u0001\u0000\u0000\u0000\u0154\u0155\u0005>\u0000\u0000\u0155" + + "Q\u0001\u0000\u0000\u0000\u0156\u0157\u0005>\u0000\u0000\u0157\u0158\u0005" + + "=\u0000\u0000\u0158S\u0001\u0000\u0000\u0000\u0159\u015a\u0005=\u0000" + + "\u0000\u015a\u015b\u0005=\u0000\u0000\u015bU\u0001\u0000\u0000\u0000\u015c" + + "\u015d\u0005=\u0000\u0000\u015d\u015e\u0005=\u0000\u0000\u015e\u015f\u0005" + + "=\u0000\u0000\u015fW\u0001\u0000\u0000\u0000\u0160\u0161\u0005!\u0000" + + "\u0000\u0161\u0162\u0005=\u0000\u0000\u0162Y\u0001\u0000\u0000\u0000\u0163" + + "\u0164\u0005!\u0000\u0000\u0164\u0165\u0005=\u0000\u0000\u0165\u0166\u0005" + + "=\u0000\u0000\u0166[\u0001\u0000\u0000\u0000\u0167\u0168\u0005&\u0000" + + "\u0000\u0168]\u0001\u0000\u0000\u0000\u0169\u016a\u0005^\u0000\u0000\u016a" + + "_\u0001\u0000\u0000\u0000\u016b\u016c\u0005|\u0000\u0000\u016ca\u0001" + + "\u0000\u0000\u0000\u016d\u016e\u0005&\u0000\u0000\u016e\u016f\u0005&\u0000" + + 
"\u0000\u016fc\u0001\u0000\u0000\u0000\u0170\u0171\u0005|\u0000\u0000\u0171" + + "\u0172\u0005|\u0000\u0000\u0172e\u0001\u0000\u0000\u0000\u0173\u0174\u0005" + + "?\u0000\u0000\u0174g\u0001\u0000\u0000\u0000\u0175\u0176\u0005:\u0000" + + "\u0000\u0176i\u0001\u0000\u0000\u0000\u0177\u0178\u0005?\u0000\u0000\u0178" + + "\u0179\u0005:\u0000\u0000\u0179k\u0001\u0000\u0000\u0000\u017a\u017b\u0005" + + ":\u0000\u0000\u017b\u017c\u0005:\u0000\u0000\u017cm\u0001\u0000\u0000" + + "\u0000\u017d\u017e\u0005-\u0000\u0000\u017e\u017f\u0005>\u0000\u0000\u017f" + + "o\u0001\u0000\u0000\u0000\u0180\u0181\u0005=\u0000\u0000\u0181\u0182\u0005" + + "~\u0000\u0000\u0182q\u0001\u0000\u0000\u0000\u0183\u0184\u0005=\u0000" + + "\u0000\u0184\u0185\u0005=\u0000\u0000\u0185\u0186\u0005~\u0000\u0000\u0186" + + "s\u0001\u0000\u0000\u0000\u0187\u0188\u0005+\u0000\u0000\u0188\u0189\u0005" + + "+\u0000\u0000\u0189u\u0001\u0000\u0000\u0000\u018a\u018b\u0005-\u0000" + + "\u0000\u018b\u018c\u0005-\u0000\u0000\u018cw\u0001\u0000\u0000\u0000\u018d" + + "\u018e\u0005=\u0000\u0000\u018ey\u0001\u0000\u0000\u0000\u018f\u0190\u0005" + + "+\u0000\u0000\u0190\u0191\u0005=\u0000\u0000\u0191{\u0001\u0000\u0000" + + "\u0000\u0192\u0193\u0005-\u0000\u0000\u0193\u0194\u0005=\u0000\u0000\u0194" + + "}\u0001\u0000\u0000\u0000\u0195\u0196\u0005*\u0000\u0000\u0196\u0197\u0005" + + "=\u0000\u0000\u0197\u007f\u0001\u0000\u0000\u0000\u0198\u0199\u0005/\u0000" + + "\u0000\u0199\u019a\u0005=\u0000\u0000\u019a\u0081\u0001\u0000\u0000\u0000" + + "\u019b\u019c\u0005%\u0000\u0000\u019c\u019d\u0005=\u0000\u0000\u019d\u0083" + + "\u0001\u0000\u0000\u0000\u019e\u019f\u0005&\u0000\u0000\u019f\u01a0\u0005" + + "=\u0000\u0000\u01a0\u0085\u0001\u0000\u0000\u0000\u01a1\u01a2\u0005^\u0000" + + "\u0000\u01a2\u01a3\u0005=\u0000\u0000\u01a3\u0087\u0001\u0000\u0000\u0000" + + "\u01a4\u01a5\u0005|\u0000\u0000\u01a5\u01a6\u0005=\u0000\u0000\u01a6\u0089" + + "\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005<\u0000\u0000\u01a8\u01a9\u0005" + + "<\u0000\u0000\u01a9\u01aa\u0005=\u0000\u0000\u01aa\u008b\u0001\u0000\u0000" + + "\u0000\u01ab\u01ac\u0005>\u0000\u0000\u01ac\u01ad\u0005>\u0000\u0000\u01ad" + + "\u01ae\u0005=\u0000\u0000\u01ae\u008d\u0001\u0000\u0000\u0000\u01af\u01b0" + + "\u0005>\u0000\u0000\u01b0\u01b1\u0005>\u0000\u0000\u01b1\u01b2\u0005>" + + "\u0000\u0000\u01b2\u01b3\u0005=\u0000\u0000\u01b3\u008f\u0001\u0000\u0000" + + "\u0000\u01b4\u01b6\u00050\u0000\u0000\u01b5\u01b7\u0007\u0002\u0000\u0000" + + "\u01b6\u01b5\u0001\u0000\u0000\u0000\u01b7\u01b8\u0001\u0000\u0000\u0000" + + "\u01b8\u01b6\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000" + + "\u01b9\u01bb\u0001\u0000\u0000\u0000\u01ba\u01bc\u0007\u0003\u0000\u0000" + + "\u01bb\u01ba\u0001\u0000\u0000\u0000\u01bb\u01bc\u0001\u0000\u0000\u0000" + + "\u01bc\u0091\u0001\u0000\u0000\u0000\u01bd\u01be\u00050\u0000\u0000\u01be" + + "\u01c0\u0007\u0004\u0000\u0000\u01bf\u01c1\u0007\u0005\u0000\u0000\u01c0" + + "\u01bf\u0001\u0000\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2" + + "\u01c0\u0001\u0000\u0000\u0000\u01c2\u01c3\u0001\u0000\u0000\u0000\u01c3" + + "\u01c5\u0001\u0000\u0000\u0000\u01c4\u01c6\u0007\u0003\u0000\u0000\u01c5" + + "\u01c4\u0001\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6" + + "\u0093\u0001\u0000\u0000\u0000\u01c7\u01d0\u00050\u0000\u0000\u01c8\u01cc" + + "\u0007\u0006\u0000\u0000\u01c9\u01cb\u0007\u0007\u0000\u0000\u01ca\u01c9" + + "\u0001\u0000\u0000\u0000\u01cb\u01ce\u0001\u0000\u0000\u0000\u01cc\u01ca" + + 
"\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001\u0000\u0000\u0000\u01cd\u01d0" + + "\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001\u0000\u0000\u0000\u01cf\u01c7" + + "\u0001\u0000\u0000\u0000\u01cf\u01c8\u0001\u0000\u0000\u0000\u01d0\u01d2" + + "\u0001\u0000\u0000\u0000\u01d1\u01d3\u0007\b\u0000\u0000\u01d2\u01d1\u0001" + + "\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u0095\u0001" + + "\u0000\u0000\u0000\u01d4\u01dd\u00050\u0000\u0000\u01d5\u01d9\u0007\u0006" + + "\u0000\u0000\u01d6\u01d8\u0007\u0007\u0000\u0000\u01d7\u01d6\u0001\u0000" + + "\u0000\u0000\u01d8\u01db\u0001\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000" + + "\u0000\u0000\u01d9\u01da\u0001\u0000\u0000\u0000\u01da\u01dd\u0001\u0000" + + "\u0000\u0000\u01db\u01d9\u0001\u0000\u0000\u0000\u01dc\u01d4\u0001\u0000" + + "\u0000\u0000\u01dc\u01d5\u0001\u0000\u0000\u0000\u01dd\u01e4\u0001\u0000" + + "\u0000\u0000\u01de\u01e0\u0003\u0012\b\u0000\u01df\u01e1\u0007\u0007\u0000" + + "\u0000\u01e0\u01df\u0001\u0000\u0000\u0000\u01e1\u01e2\u0001\u0000\u0000" + + "\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000" + + "\u0000\u01e3\u01e5\u0001\u0000\u0000\u0000\u01e4\u01de\u0001\u0000\u0000" + + "\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01ef\u0001\u0000\u0000" + + "\u0000\u01e6\u01e8\u0007\t\u0000\u0000\u01e7\u01e9\u0007\n\u0000\u0000" + + "\u01e8\u01e7\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000" + + "\u01e9\u01eb\u0001\u0000\u0000\u0000\u01ea\u01ec\u0007\u0007\u0000\u0000" + + "\u01eb\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed\u0001\u0000\u0000\u0000" + + "\u01ed\u01eb\u0001\u0000\u0000\u0000\u01ed\u01ee\u0001\u0000\u0000\u0000" + + "\u01ee\u01f0\u0001\u0000\u0000\u0000\u01ef\u01e6\u0001\u0000\u0000\u0000" + + "\u01ef\u01f0\u0001\u0000\u0000\u0000\u01f0\u01f2\u0001\u0000\u0000\u0000" + + "\u01f1\u01f3\u0007\u000b\u0000\u0000\u01f2\u01f1\u0001\u0000\u0000\u0000" + + "\u01f2\u01f3\u0001\u0000\u0000\u0000\u01f3\u0097\u0001\u0000\u0000\u0000" + + "\u01f4\u01fc\u0005\"\u0000\u0000\u01f5\u01f6\u0005\\\u0000\u0000\u01f6" + + "\u01fb\u0005\"\u0000\u0000\u01f7\u01f8\u0005\\\u0000\u0000\u01f8\u01fb" + + "\u0005\\\u0000\u0000\u01f9\u01fb\b\f\u0000\u0000\u01fa\u01f5\u0001\u0000" + + "\u0000\u0000\u01fa\u01f7\u0001\u0000\u0000\u0000\u01fa\u01f9\u0001\u0000" + + "\u0000\u0000\u01fb\u01fe\u0001\u0000\u0000\u0000\u01fc\u01fd\u0001\u0000" + + "\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01ff\u0001\u0000" + + "\u0000\u0000\u01fe\u01fc\u0001\u0000\u0000\u0000\u01ff\u020d\u0005\"\u0000" + + "\u0000\u0200\u0208\u0005\'\u0000\u0000\u0201\u0202\u0005\\\u0000\u0000" + + "\u0202\u0207\u0005\'\u0000\u0000\u0203\u0204\u0005\\\u0000\u0000\u0204" + + "\u0207\u0005\\\u0000\u0000\u0205\u0207\b\r\u0000\u0000\u0206\u0201\u0001" + + "\u0000\u0000\u0000\u0206\u0203\u0001\u0000\u0000\u0000\u0206\u0205\u0001" + + "\u0000\u0000\u0000\u0207\u020a\u0001\u0000\u0000\u0000\u0208\u0209\u0001" + + "\u0000\u0000\u0000\u0208\u0206\u0001\u0000\u0000\u0000\u0209\u020b\u0001" + + "\u0000\u0000\u0000\u020a\u0208\u0001\u0000\u0000\u0000\u020b\u020d\u0005" + + "\'\u0000\u0000\u020c\u01f4\u0001\u0000\u0000\u0000\u020c\u0200\u0001\u0000" + + "\u0000\u0000\u020d\u0099\u0001\u0000\u0000\u0000\u020e\u0212\u0005/\u0000" + + "\u0000\u020f\u0210\u0005\\\u0000\u0000\u0210\u0213\b\u000e\u0000\u0000" + + "\u0211\u0213\b\u000f\u0000\u0000\u0212\u020f\u0001\u0000\u0000\u0000\u0212" + + "\u0211\u0001\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000\u0000\u0214" + + "\u0215\u0001\u0000\u0000\u0000\u0214\u0212\u0001\u0000\u0000\u0000\u0215" + + 
"\u0216\u0001\u0000\u0000\u0000\u0216\u021a\u0005/\u0000\u0000\u0217\u0219" + + "\u0007\u0010\u0000\u0000\u0218\u0217\u0001\u0000\u0000\u0000\u0219\u021c" + + "\u0001\u0000\u0000\u0000\u021a\u0218\u0001\u0000\u0000\u0000\u021a\u021b" + + "\u0001\u0000\u0000\u0000\u021b\u021d\u0001\u0000\u0000\u0000\u021c\u021a" + + "\u0001\u0000\u0000\u0000\u021d\u021e\u0004L\u0001\u0000\u021e\u009b\u0001" + + "\u0000\u0000\u0000\u021f\u0220\u0005t\u0000\u0000\u0220\u0221\u0005r\u0000" + + "\u0000\u0221\u0222\u0005u\u0000\u0000\u0222\u0223\u0005e\u0000\u0000\u0223" + + "\u009d\u0001\u0000\u0000\u0000\u0224\u0225\u0005f\u0000\u0000\u0225\u0226" + + "\u0005a\u0000\u0000\u0226\u0227\u0005l\u0000\u0000\u0227\u0228\u0005s" + + "\u0000\u0000\u0228\u0229\u0005e\u0000\u0000\u0229\u009f\u0001\u0000\u0000" + + "\u0000\u022a\u022b\u0005n\u0000\u0000\u022b\u022c\u0005u\u0000\u0000\u022c" + + "\u022d\u0005l\u0000\u0000\u022d\u022e\u0005l\u0000\u0000\u022e\u00a1\u0001" + + "\u0000\u0000\u0000\u022f\u0230\u0005b\u0000\u0000\u0230\u0231\u0005o\u0000" + + "\u0000\u0231\u0232\u0005o\u0000\u0000\u0232\u0233\u0005l\u0000\u0000\u0233" + + "\u0234\u0005e\u0000\u0000\u0234\u0235\u0005a\u0000\u0000\u0235\u0256\u0005" + + "n\u0000\u0000\u0236\u0237\u0005b\u0000\u0000\u0237\u0238\u0005y\u0000" + + "\u0000\u0238\u0239\u0005t\u0000\u0000\u0239\u0256\u0005e\u0000\u0000\u023a" + + "\u023b\u0005s\u0000\u0000\u023b\u023c\u0005h\u0000\u0000\u023c\u023d\u0005" + + "o\u0000\u0000\u023d\u023e\u0005r\u0000\u0000\u023e\u0256\u0005t\u0000" + + "\u0000\u023f\u0240\u0005c\u0000\u0000\u0240\u0241\u0005h\u0000\u0000\u0241" + + "\u0242\u0005a\u0000\u0000\u0242\u0256\u0005r\u0000\u0000\u0243\u0244\u0005" + + "i\u0000\u0000\u0244\u0245\u0005n\u0000\u0000\u0245\u0256\u0005t\u0000" + + "\u0000\u0246\u0247\u0005l\u0000\u0000\u0247\u0248\u0005o\u0000\u0000\u0248" + + "\u0249\u0005n\u0000\u0000\u0249\u0256\u0005g\u0000\u0000\u024a\u024b\u0005" + + "f\u0000\u0000\u024b\u024c\u0005l\u0000\u0000\u024c\u024d\u0005o\u0000" + + "\u0000\u024d\u024e\u0005a\u0000\u0000\u024e\u0256\u0005t\u0000\u0000\u024f" + + "\u0250\u0005d\u0000\u0000\u0250\u0251\u0005o\u0000\u0000\u0251\u0252\u0005" + + "u\u0000\u0000\u0252\u0253\u0005b\u0000\u0000\u0253\u0254\u0005l\u0000" + + "\u0000\u0254\u0256\u0005e\u0000\u0000\u0255\u022f\u0001\u0000\u0000\u0000" + + "\u0255\u0236\u0001\u0000\u0000\u0000\u0255\u023a\u0001\u0000\u0000\u0000" + + "\u0255\u023f\u0001\u0000\u0000\u0000\u0255\u0243\u0001\u0000\u0000\u0000" + + "\u0255\u0246\u0001\u0000\u0000\u0000\u0255\u024a\u0001\u0000\u0000\u0000" + + "\u0255\u024f\u0001\u0000\u0000\u0000\u0256\u00a3\u0001\u0000\u0000\u0000" + + "\u0257\u0258\u0005d\u0000\u0000\u0258\u0259\u0005e\u0000\u0000\u0259\u025a" + + "\u0005f\u0000\u0000\u025a\u00a5\u0001\u0000\u0000\u0000\u025b\u025f\u0007" + + "\u0011\u0000\u0000\u025c\u025e\u0007\u0012\u0000\u0000\u025d\u025c\u0001" + + "\u0000\u0000\u0000\u025e\u0261\u0001\u0000\u0000\u0000\u025f\u025d\u0001" + + "\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260\u00a7\u0001" + + "\u0000\u0000\u0000\u0261\u025f\u0001\u0000\u0000\u0000\u0262\u026b\u0005" + + "0\u0000\u0000\u0263\u0267\u0007\u0006\u0000\u0000\u0264\u0266\u0007\u0007" + + "\u0000\u0000\u0265\u0264\u0001\u0000\u0000\u0000\u0266\u0269\u0001\u0000" + + "\u0000\u0000\u0267\u0265\u0001\u0000\u0000\u0000\u0267\u0268\u0001\u0000" + + "\u0000\u0000\u0268\u026b\u0001\u0000\u0000\u0000\u0269\u0267\u0001\u0000" + + "\u0000\u0000\u026a\u0262\u0001\u0000\u0000\u0000\u026a\u0263\u0001\u0000" + + 
"\u0000\u0000\u026b\u026c\u0001\u0000\u0000\u0000\u026c\u026d\u0006S\u0002" + + "\u0000\u026d\u00a9\u0001\u0000\u0000\u0000\u026e\u0272\u0007\u0011\u0000" + + "\u0000\u026f\u0271\u0007\u0012\u0000\u0000\u0270\u026f\u0001\u0000\u0000" + + "\u0000\u0271\u0274\u0001\u0000\u0000\u0000\u0272\u0270\u0001\u0000\u0000" + + "\u0000\u0272\u0273\u0001\u0000\u0000\u0000\u0273\u0275\u0001\u0000\u0000" + + "\u0000\u0274\u0272\u0001\u0000\u0000\u0000\u0275\u0276\u0006T\u0002\u0000" + + "\u0276\u00ab\u0001\u0000\u0000\u0000\"\u0000\u0001\u00af\u00b9\u00c2\u00c7" + + "\u01b8\u01bb\u01c2\u01c5\u01cc\u01cf\u01d2\u01d9\u01dc\u01e2\u01e4\u01e8" + + "\u01ed\u01ef\u01f2\u01fa\u01fc\u0206\u0208\u020c\u0212\u0214\u021a\u0255" + + "\u025f\u0267\u026a\u0272\u0003\u0006\u0000\u0000\u0002\u0001\u0000\u0002" + + "\u0000\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { _decisionToDFA = new DFA[_ATN.getNumberOfDecisions()]; diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java new file mode 100644 index 0000000000000..dbba3226ba300 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/CommentTests.java @@ -0,0 +1,51 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.painless; + +public class CommentTests extends ScriptTestCase { + + public void testSingleLineComments() { + assertEquals(5, exec("// comment\n return 5")); + assertEquals(5, exec("// comment\r return 5")); + assertEquals(5, exec("return 5 // comment no newline or return char")); + } + + public void testOpenCloseComments() { + assertEquals(5, exec("/* single-line comment */ return 5")); + assertEquals(5, exec("/* multi-line \n */ return 5")); + assertEquals(5, exec("/* multi-line \r */ return 5")); + assertEquals(5, exec("/* multi-line \n\n\r\r */ return 5")); + assertEquals(5, exec("def five = 5; /* multi-line \r */ return five")); + assertEquals(5, exec("return 5 /* multi-line ignored code */")); + } +} From feeb595be5c998efca4d11d65ea6a3d00cea17dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Luk=C3=A1=C5=A1=20Vl=C4=8Dek?= <lukas.vlcek@aiven.io> Date: Wed, 10 Jan 2024 17:44:26 +0100 Subject: [PATCH 075/178] Fix typo in API annotation check message (#11836) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Fixing some internal comments as well Signed-off-by: Lukáš Vlček <lukas.vlcek@aiven.io> --- CHANGELOG.md | 1 + .../processor/ApiAnnotationProcessor.java | 6 ++--- .../ApiAnnotationProcessorTests.java | 22 +++++++++---------- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1e00e42e5b756..fed2bb54783a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -82,6 +82,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944)) - Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993)) - Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439)) +- Fix typo in API annotation check message ([11836](https://github.com/opensearch-project/OpenSearch/pull/11836)) ### Security diff --git a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java index 1864aec4aa951..569f48a8465f3 100644 --- a/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java +++ b/libs/common/src/main/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessor.java @@ -113,7 +113,7 @@ private void process(ExecutableElement executable, Element enclosing) { // The executable element should not be internal (unless constructor for injectable core component) checkNotInternal(enclosing, executable); - // Check this elements annotations + // Check this element's annotations for (final AnnotationMirror annotation : executable.getAnnotationMirrors()) { final Element element = annotation.getAnnotationType().asElement(); if (inspectable(element)) { @@ -210,7 +210,7 @@ private void process(ExecutableElement executable, ReferenceType ref) { } } - // Check this elements annotations + // Check this element's annotations for (final AnnotationMirror annotation : ref.getAnnotationMirrors()) { final Element element = annotation.getAnnotationType().asElement(); if (inspectable(element)) { @@ -316,7 +316,7 @@ private void checkPublic(@Nullable Element referencedBy, final Element element) reportFailureAs, "The element " + element -
+ " is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi" + + " is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi" + ((referencedBy != null) ? " (referenced by " + referencedBy + ") " : "") ); } diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java index df04709458b29..8d8a4c7895339 100644 --- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java +++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/ApiAnnotationProcessorTests.java @@ -35,7 +35,7 @@ public void testPublicApiMethodArgumentNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotated)" ) ) @@ -56,7 +56,7 @@ public void testPublicApiMethodArgumentNotAnnotatedGenerics() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedGenerics)" ) ) @@ -77,7 +77,7 @@ public void testPublicApiMethodThrowsNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedException is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodThrowsNotAnnotated)" ) ) @@ -111,7 +111,7 @@ public void testPublicApiMethodArgumentNotAnnotatedPackagePrivate() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedPackagePrivate is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodArgumentNotAnnotatedPackagePrivate)" ) ) @@ -209,7 +209,7 @@ public void testPublicApiMethodReturnNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + 
"(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotated)" ) ) @@ -230,7 +230,7 @@ public void testPublicApiMethodReturnNotAnnotatedGenerics() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedGenerics)" ) ) @@ -251,7 +251,7 @@ public void testPublicApiMethodReturnNotAnnotatedArray() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedArray)" ) ) @@ -272,7 +272,7 @@ public void testPublicApiMethodReturnNotAnnotatedBoundedGenerics() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedBoundedGenerics)" ) ) @@ -297,7 +297,7 @@ public void testPublicApiMethodReturnNotAnnotatedAnnotation() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodReturnNotAnnotatedAnnotation)" ) ) @@ -388,7 +388,7 @@ public void testPublicApiMethodGenericsArgumentNotAnnotated() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotated is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by org.opensearch.common.annotation.processor.PublicApiMethodGenericsArgumentNotAnnotated)" ) ) @@ -453,7 +453,7 @@ public void testPublicApiMethodReturnAnnotatedGenerics() { matching( Diagnostic.Kind.ERROR, containsString( - "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not maked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "The element org.opensearch.common.annotation.processor.NotAnnotatedAnnotation is part of the public APIs but is not marked as @PublicApi, @ExperimentalApi or @DeprecatedApi " + "(referenced by 
org.opensearch.common.annotation.processor.PublicApiMethodReturnAnnotatedGenerics)" ) ) From 2f8bf031bf79baf42bc9fd2ba0269d0ece3cfbb9 Mon Sep 17 00:00:00 2001 From: Marc Handalian <handalm@amazon.com> Date: Wed, 10 Jan 2024 12:05:14 -0800 Subject: [PATCH 076/178] Bump 2.12.0 to Lucene 9.9.1 in Version.java (#11837) Signed-off-by: Marc Handalian <marc.handalian@gmail.com> --- libs/core/src/main/java/org/opensearch/Version.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java index b2a8e619c9720..6a92993f5dd42 100644 --- a/libs/core/src/main/java/org/opensearch/Version.java +++ b/libs/core/src/main/java/org/opensearch/Version.java @@ -98,7 +98,7 @@ public class Version implements Comparable<Version>, ToXContentFragment { public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0); public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0); - public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_8_0); + public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_1); public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_1); public static final Version CURRENT = V_3_0_0; From 10be2eff03d8b7c93e2f5f4febb587edf12815e0 Mon Sep 17 00:00:00 2001 From: Neetika Singhal <neetiks@amazon.com> Date: Wed, 10 Jan 2024 14:15:18 -0800 Subject: [PATCH 077/178] Enable test CardinalityWithRequestBreakerIT.testRequestBreaker after lucene 99 upgrade (#11841) Signed-off-by: Neetika Singhal <neetiks@amazon.com> --- .../aggregations/metrics/CardinalityWithRequestBreakerIT.java | 1 - 1 file changed, 1 deletion(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index 6cbf278317b1b..94756f3fe9f99 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -76,7 +76,6 @@ protected Settings featureFlagSettings() { /** * Test that searches using cardinality aggregations returns all request breaker memory. */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/10154") public void testRequestBreaker() throws Exception { final String requestBreaker = randomIntBetween(1, 10000) + "kb"; logger.info("--> Using request breaker setting: {}", requestBreaker); From de78c7ca6f4d3ca833263a320bb53dc7dd88ddc1 Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Wed, 10 Jan 2024 18:47:29 -0600 Subject: [PATCH 078/178] Remove unused segment replication feature flag (#11850) This feature has been released and the feature flag is no longer used. 
Signed-off-by: Andrew Ross <andrross@amazon.com> --- .../common/settings/FeatureFlagSettings.java | 22 ++++++------------- .../opensearch/common/util/FeatureFlags.java | 12 ---------- 2 files changed, 7 insertions(+), 27 deletions(-) diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java index d3285c379bcc4..e19f8e8370d5b 100644 --- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java @@ -11,9 +11,6 @@ import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.util.FeatureFlags; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; import java.util.Set; /** @@ -32,17 +29,12 @@ protected FeatureFlagSettings( super(settings, settingsSet, settingUpgraders, scope); } - public static final Set<Setting<?>> BUILT_IN_FEATURE_FLAGS = Collections.unmodifiableSet( - new HashSet<>( - Arrays.asList( - FeatureFlags.SEGMENT_REPLICATION_EXPERIMENTAL_SETTING, - FeatureFlags.EXTENSIONS_SETTING, - FeatureFlags.IDENTITY_SETTING, - FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING, - FeatureFlags.TELEMETRY_SETTING, - FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING, - FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING - ) - ) + public static final Set<Setting<?>> BUILT_IN_FEATURE_FLAGS = Set.of( + FeatureFlags.EXTENSIONS_SETTING, + FeatureFlags.IDENTITY_SETTING, + FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING, + FeatureFlags.TELEMETRY_SETTING, + FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING, + FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING ); } diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java index c54772caa574b..d4ab161527cc0 100644 --- a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java +++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java @@ -20,12 +20,6 @@ * @opensearch.internal */ public class FeatureFlags { - /** - * Gates the visibility of the segment replication experimental features that allows users to test unreleased beta features. - */ - public static final String SEGMENT_REPLICATION_EXPERIMENTAL = - "opensearch.experimental.feature.segment_replication_experimental.enabled"; - /** * Gates the ability for Searchable Snapshots to read snapshots that are older than the * guaranteed backward compatibility for OpenSearch (one prior major version) on a best effort basis. 
@@ -105,12 +99,6 @@ public static boolean isEnabled(Setting<Boolean> featureFlag) { } } - public static final Setting<Boolean> SEGMENT_REPLICATION_EXPERIMENTAL_SETTING = Setting.boolSetting( - SEGMENT_REPLICATION_EXPERIMENTAL, - false, - Property.NodeScope - ); - public static final Setting<Boolean> EXTENSIONS_SETTING = Setting.boolSetting(EXTENSIONS, false, Property.NodeScope); public static final Setting<Boolean> IDENTITY_SETTING = Setting.boolSetting(IDENTITY, false, Property.NodeScope); From e4583a438132034bcc20762fa76ef02a4b8713d9 Mon Sep 17 00:00:00 2001 From: Sagar <99425694+sgup432@users.noreply.github.com> Date: Wed, 10 Jan 2024 17:06:29 -0800 Subject: [PATCH 079/178] [Tiered Caching] Enable serialization of IndicesRequestCache.Key (#10275) --------- Signed-off-by: Sagar Upadhyaya <sagar.upadhyaya.121@gmail.com> Signed-off-by: Sagar <99425694+sgup432@users.noreply.github.com> Co-authored-by: Kiran Prakash <awskiran@amazon.com> --- CHANGELOG.md | 4 +- .../opensearch/core/index/shard/ShardId.java | 7 + .../indices/IndicesRequestCacheIT.java | 40 ++++ .../index/OpenSearchDirectoryReader.java | 61 ++++- .../indices/IndicesRequestCache.java | 101 +++++--- .../opensearch/indices/IndicesService.java | 16 +- .../indices/IndicesRequestCacheTests.java | 224 +++++++++++------- .../indices/IndicesServiceCloseTests.java | 19 +- 8 files changed, 327 insertions(+), 145 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fed2bb54783a7..0b256473dec6f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -101,8 +101,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255)) - Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) - Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670)) -- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753] - (https://github.com/opensearch-project/OpenSearch/pull/10753)) +- [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275)) +- [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753)) - [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853)) - Update the indexRandom function to create more segments for concurrent search tests ([10247](https://github.com/opensearch-project/OpenSearch/pull/10247)) - Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248)) diff --git a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java index c0abad7ed727f..1e48cf1f476da 100644 --- a/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java +++ b/libs/core/src/main/java/org/opensearch/core/index/shard/ShardId.java @@ -32,6 +32,7 @@ package org.opensearch.core.index.shard; +import org.apache.lucene.util.RamUsageEstimator; import org.opensearch.common.annotation.PublicApi; import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; @@ -55,6 +56,8 @@ public class ShardId implements 
Comparable<ShardId>, ToXContentFragment, Writeab private final int shardId; private final int hashCode; + private final static long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ShardId.class); + /** * Constructs a new shard id. * @param index the index name @@ -88,6 +91,10 @@ public ShardId(StreamInput in) throws IOException { hashCode = computeHashCode(); } + public long getBaseRamBytesUsed() { + return BASE_RAM_BYTES_USED; + } + /** * Writes this shard id to a stream. * @param out the stream to write to diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java index 848f6eddbb0df..51dba07a8f9f8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java @@ -636,6 +636,45 @@ public void testProfileDisableCache() throws Exception { } } + public void testCacheWithInvalidation() throws Exception { + Client client = client(); + assertAcked( + client.admin() + .indices() + .prepareCreate("index") + .setMapping("k", "type=keyword") + .setSettings( + Settings.builder() + .put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + .get() + ); + indexRandom(true, client.prepareIndex("index").setSource("k", "hello")); + ensureSearchable("index"); + SearchResponse resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get(); + assertSearchResponse(resp); + OpenSearchAssertions.assertAllSuccessful(resp); + assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); + + assertCacheState(client, "index", 0, 1); + // Index but don't refresh + indexRandom(false, client.prepareIndex("index").setSource("k", "hello2")); + resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get(); + assertSearchResponse(resp); + // Should expect hit as here as refresh didn't happen + assertCacheState(client, "index", 1, 1); + + // Explicit refresh would invalidate cache + refresh(); + // Hit same query again + resp = client.prepareSearch("index").setRequestCache(true).setQuery(QueryBuilders.termQuery("k", "hello")).get(); + assertSearchResponse(resp); + // Should expect miss as key has changed due to change in IndexReader.CacheKey (due to refresh) + assertCacheState(client, "index", 1, 2); + } + private static void assertCacheState(Client client, String index, long expectedHits, long expectedMisses) { RequestCacheStats requestCacheStats = client.admin() .indices() @@ -650,6 +689,7 @@ private static void assertCacheState(Client client, String index, long expectedH Arrays.asList(expectedHits, expectedMisses, 0L), Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount(), requestCacheStats.getEvictions()) ); + } } diff --git a/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java b/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java index d6b2bb239b2a1..f9a87b9e74214 100644 --- a/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java +++ b/server/src/main/java/org/opensearch/common/lucene/index/OpenSearchDirectoryReader.java @@ -40,6 +40,8 @@ import org.opensearch.core.index.shard.ShardId; import 
java.io.IOException; +import java.util.Optional; +import java.util.UUID; /** * A {@link org.apache.lucene.index.FilterDirectoryReader} that exposes @@ -53,11 +55,14 @@ public final class OpenSearchDirectoryReader extends FilterDirectoryReader { private final ShardId shardId; private final FilterDirectoryReader.SubReaderWrapper wrapper; + private final DelegatingCacheHelper delegatingCacheHelper; + private OpenSearchDirectoryReader(DirectoryReader in, FilterDirectoryReader.SubReaderWrapper wrapper, ShardId shardId) throws IOException { super(in, wrapper); this.wrapper = wrapper; this.shardId = shardId; + this.delegatingCacheHelper = new DelegatingCacheHelper(in.getReaderCacheHelper()); } /** @@ -70,7 +75,61 @@ public ShardId shardId() { @Override public CacheHelper getReaderCacheHelper() { // safe to delegate since this reader does not alter the index - return in.getReaderCacheHelper(); + return this.delegatingCacheHelper; + } + + public DelegatingCacheHelper getDelegatingCacheHelper() { + return this.delegatingCacheHelper; + } + + /** + * Wraps existing IndexReader cache helper which internally provides a way to wrap CacheKey. + * @opensearch.internal + */ + public class DelegatingCacheHelper implements CacheHelper { + private final CacheHelper cacheHelper; + private final DelegatingCacheKey serializableCacheKey; + + DelegatingCacheHelper(CacheHelper cacheHelper) { + this.cacheHelper = cacheHelper; + this.serializableCacheKey = new DelegatingCacheKey(Optional.ofNullable(cacheHelper).map(key -> getKey()).orElse(null)); + } + + @Override + public CacheKey getKey() { + return this.cacheHelper.getKey(); + } + + public DelegatingCacheKey getDelegatingCacheKey() { + return this.serializableCacheKey; + } + + @Override + public void addClosedListener(ClosedListener listener) { + this.cacheHelper.addClosedListener(listener); + } + } + + /** + * Wraps internal IndexReader.CacheKey and attaches a uniqueId to it which can be eventually be used instead of + * object itself for serialization purposes. 
+ */ + public class DelegatingCacheKey { + private final CacheKey cacheKey; + private final String uniqueId; + + DelegatingCacheKey(CacheKey cacheKey) { + this.cacheKey = cacheKey; + this.uniqueId = UUID.randomUUID().toString(); + } + + public CacheKey getCacheKey() { + return this.cacheKey; + } + + public String getId() { + return uniqueId; + } } @Override diff --git a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java index 4a19f8eb8714d..6d5c23274dbd6 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/opensearch/indices/IndicesRequestCache.java @@ -51,7 +51,12 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; +import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.core.common.unit.ByteSizeValue; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.index.shard.IndexShard; import java.io.Closeable; import java.io.IOException; @@ -60,8 +65,10 @@ import java.util.HashSet; import java.util.Iterator; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentMap; +import java.util.function.Function; /** * The indices request cache allows to cache a shard level request stage responses, helping with improving @@ -103,13 +110,16 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest Property.NodeScope ); + private final static long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class); + private final ConcurrentMap<CleanupKey, Boolean> registeredClosedListeners = ConcurrentCollections.newConcurrentMap(); private final Set<CleanupKey> keysToClean = ConcurrentCollections.newConcurrentSet(); private final ByteSizeValue size; private final TimeValue expire; private final Cache<Key, BytesReference> cache; + private final Function<ShardId, Optional<CacheEntity>> cacheEntityLookup; - IndicesRequestCache(Settings settings) { + IndicesRequestCache(Settings settings, Function<ShardId, Optional<CacheEntity>> cacheEntityFunction) { this.size = INDICES_CACHE_QUERY_SIZE.get(settings); this.expire = INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? INDICES_CACHE_QUERY_EXPIRE.get(settings) : null; long sizeInBytes = size.getBytes(); @@ -121,6 +131,7 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest cacheBuilder.setExpireAfterAccess(expire); } cache = cacheBuilder.build(); + this.cacheEntityLookup = cacheEntityFunction; } @Override @@ -135,23 +146,31 @@ void clear(CacheEntity entity) { @Override public void onRemoval(RemovalNotification<Key, BytesReference> notification) { - notification.getKey().entity.onRemoval(notification); + // In case this event happens for an old shard, we can safely ignore this as we don't keep track for old + // shards as part of request cache. 
+ cacheEntityLookup.apply(notification.getKey().shardId).ifPresent(entity -> entity.onRemoval(notification)); } BytesReference getOrCompute( - CacheEntity cacheEntity, + IndicesService.IndexShardCacheEntity cacheEntity, CheckedSupplier<BytesReference, IOException> loader, DirectoryReader reader, BytesReference cacheKey ) throws Exception { assert reader.getReaderCacheHelper() != null; - final Key key = new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey); + assert reader.getReaderCacheHelper() instanceof OpenSearchDirectoryReader.DelegatingCacheHelper; + + OpenSearchDirectoryReader.DelegatingCacheHelper delegatingCacheHelper = (OpenSearchDirectoryReader.DelegatingCacheHelper) reader + .getReaderCacheHelper(); + String readerCacheKeyId = delegatingCacheHelper.getDelegatingCacheKey().getId(); + assert readerCacheKeyId != null; + final Key key = new Key(((IndexShard) cacheEntity.getCacheIdentity()).shardId(), cacheKey, readerCacheKeyId); Loader cacheLoader = new Loader(cacheEntity, loader); BytesReference value = cache.computeIfAbsent(key, cacheLoader); if (cacheLoader.isLoaded()) { - key.entity.onMiss(); + cacheEntity.onMiss(); // see if its the first time we see this reader, and make sure to register a cleanup key - CleanupKey cleanupKey = new CleanupKey(cacheEntity, reader.getReaderCacheHelper().getKey()); + CleanupKey cleanupKey = new CleanupKey(cacheEntity, readerCacheKeyId); if (!registeredClosedListeners.containsKey(cleanupKey)) { Boolean previous = registeredClosedListeners.putIfAbsent(cleanupKey, Boolean.TRUE); if (previous == null) { @@ -159,7 +178,7 @@ BytesReference getOrCompute( } } } else { - key.entity.onHit(); + cacheEntity.onHit(); } return value; } @@ -170,9 +189,14 @@ BytesReference getOrCompute( * @param reader the reader to invalidate the cache entry for * @param cacheKey the cache key to invalidate */ - void invalidate(CacheEntity cacheEntity, DirectoryReader reader, BytesReference cacheKey) { + void invalidate(IndicesService.IndexShardCacheEntity cacheEntity, DirectoryReader reader, BytesReference cacheKey) { assert reader.getReaderCacheHelper() != null; - cache.invalidate(new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey)); + String readerCacheKeyId = null; + if (reader instanceof OpenSearchDirectoryReader) { + IndexReader.CacheHelper cacheHelper = ((OpenSearchDirectoryReader) reader).getDelegatingCacheHelper(); + readerCacheKeyId = ((OpenSearchDirectoryReader.DelegatingCacheHelper) cacheHelper).getDelegatingCacheKey().getId(); + } + cache.invalidate(new Key(((IndexShard) cacheEntity.getCacheIdentity()).shardId(), cacheKey, readerCacheKeyId)); } /** @@ -240,6 +264,7 @@ interface CacheEntity extends Accountable { * Called when this entity instance is removed */ void onRemoval(RemovalNotification<Key, BytesReference> notification); + } /** @@ -247,22 +272,26 @@ interface CacheEntity extends Accountable { * * @opensearch.internal */ - public static class Key implements Accountable { - private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class); - - public final CacheEntity entity; // use as identity equality - public final IndexReader.CacheKey readerCacheKey; + static class Key implements Accountable, Writeable { + public final ShardId shardId; // use as identity equality + public final String readerCacheKeyId; public final BytesReference value; - Key(CacheEntity entity, IndexReader.CacheKey readerCacheKey, BytesReference value) { - this.entity = entity; - this.readerCacheKey = 
Objects.requireNonNull(readerCacheKey); + Key(ShardId shardId, BytesReference value, String readerCacheKeyId) { + this.shardId = shardId; this.value = value; + this.readerCacheKeyId = Objects.requireNonNull(readerCacheKeyId); + } + + Key(StreamInput in) throws IOException { + this.shardId = in.readOptionalWriteable(ShardId::new); + this.readerCacheKeyId = in.readOptionalString(); + this.value = in.readBytesReference(); } @Override public long ramBytesUsed() { - return BASE_RAM_BYTES_USED + entity.ramBytesUsed() + value.length(); + return BASE_RAM_BYTES_USED + shardId.getBaseRamBytesUsed() + value.length(); } @Override @@ -276,28 +305,35 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Key key = (Key) o; - if (Objects.equals(readerCacheKey, key.readerCacheKey) == false) return false; - if (!entity.getCacheIdentity().equals(key.entity.getCacheIdentity())) return false; + if (!Objects.equals(readerCacheKeyId, key.readerCacheKeyId)) return false; + if (!shardId.equals(key.shardId)) return false; if (!value.equals(key.value)) return false; return true; } @Override public int hashCode() { - int result = entity.getCacheIdentity().hashCode(); - result = 31 * result + readerCacheKey.hashCode(); + int result = shardId.hashCode(); + result = 31 * result + readerCacheKeyId.hashCode(); result = 31 * result + value.hashCode(); return result; } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalWriteable(shardId); + out.writeOptionalString(readerCacheKeyId); + out.writeBytesReference(value); + } } private class CleanupKey implements IndexReader.ClosedListener { final CacheEntity entity; - final IndexReader.CacheKey readerCacheKey; + final String readerCacheKeyId; - private CleanupKey(CacheEntity entity, IndexReader.CacheKey readerCacheKey) { + private CleanupKey(CacheEntity entity, String readerCacheKeyId) { this.entity = entity; - this.readerCacheKey = readerCacheKey; + this.readerCacheKeyId = readerCacheKeyId; } @Override @@ -315,7 +351,7 @@ public boolean equals(Object o) { return false; } CleanupKey that = (CleanupKey) o; - if (Objects.equals(readerCacheKey, that.readerCacheKey) == false) return false; + if (!Objects.equals(readerCacheKeyId, that.readerCacheKeyId)) return false; if (!entity.getCacheIdentity().equals(that.entity.getCacheIdentity())) return false; return true; } @@ -323,7 +359,7 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = entity.getCacheIdentity().hashCode(); - result = 31 * result + Objects.hashCode(readerCacheKey); + result = 31 * result + Objects.hashCode(readerCacheKeyId); return result; } } @@ -339,9 +375,9 @@ synchronized void cleanCache() { for (Iterator<CleanupKey> iterator = keysToClean.iterator(); iterator.hasNext();) { CleanupKey cleanupKey = iterator.next(); iterator.remove(); - if (cleanupKey.readerCacheKey == null || cleanupKey.entity.isOpen() == false) { + if (cleanupKey.readerCacheKeyId == null || !cleanupKey.entity.isOpen()) { // null indicates full cleanup, as does a closed shard - currentFullClean.add(cleanupKey.entity.getCacheIdentity()); + currentFullClean.add(((IndexShard) cleanupKey.entity.getCacheIdentity()).shardId()); } else { currentKeysToClean.add(cleanupKey); } @@ -349,10 +385,13 @@ synchronized void cleanCache() { if (!currentKeysToClean.isEmpty() || !currentFullClean.isEmpty()) { for (Iterator<Key> iterator = cache.keys().iterator(); iterator.hasNext();) { Key key = iterator.next(); - if 
(currentFullClean.contains(key.entity.getCacheIdentity())) { + if (currentFullClean.contains(key.shardId)) { iterator.remove(); } else { - if (currentKeysToClean.contains(new CleanupKey(key.entity, key.readerCacheKey))) { + // If the flow comes here, then we should have a open shard available on node. + if (currentKeysToClean.contains( + new CleanupKey(cacheEntityLookup.apply(key.shardId).orElse(null), key.readerCacheKeyId) + )) { iterator.remove(); } } diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 5c3beaf8509bd..db5b93f073b03 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -409,7 +409,13 @@ public IndicesService( this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS)); this.analysisRegistry = analysisRegistry; this.indexNameExpressionResolver = indexNameExpressionResolver; - this.indicesRequestCache = new IndicesRequestCache(settings); + this.indicesRequestCache = new IndicesRequestCache(settings, (shardId -> { + IndexService indexService = this.indices.get(shardId.getIndex().getUUID()); + if (indexService == null) { + return Optional.empty(); + } + return Optional.of(new IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); this.indicesQueryCache = new IndicesQueryCache(settings); this.mapperRegistry = mapperRegistry; this.namedWriteableRegistry = namedWriteableRegistry; @@ -1744,7 +1750,6 @@ private BytesReference cacheShardLevelResult( BytesReference cacheKey, CheckedConsumer<StreamOutput, IOException> loader ) throws Exception { - IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard); CheckedSupplier<BytesReference, IOException> supplier = () -> { /* BytesStreamOutput allows to pass the expected size but by default uses * BigArrays.PAGE_SIZE_IN_BYTES which is 16k. A common cached result ie. 
@@ -1761,7 +1766,7 @@ private BytesReference cacheShardLevelResult( return out.bytes(); } }; - return indicesRequestCache.getOrCompute(cacheEntity, supplier, reader, cacheKey); + return indicesRequestCache.getOrCompute(new IndexShardCacheEntity(shard), supplier, reader, cacheKey); } /** @@ -1769,11 +1774,12 @@ private BytesReference cacheShardLevelResult( * * @opensearch.internal */ - static final class IndexShardCacheEntity extends AbstractIndexShardCacheEntity { + public static class IndexShardCacheEntity extends AbstractIndexShardCacheEntity { + private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IndexShardCacheEntity.class); private final IndexShard indexShard; - protected IndexShardCacheEntity(IndexShard indexShard) { + public IndexShardCacheEntity(IndexShard indexShard) { this.indexShard = indexShard; } diff --git a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java index 8494259c8fd8a..73728aec12e51 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesRequestCacheTests.java @@ -52,23 +52,36 @@ import org.opensearch.common.util.io.IOUtils; import org.opensearch.core.common.bytes.AbstractBytesReference; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentHelper; +import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.IndexService; +import org.opensearch.index.cache.request.RequestCacheStats; import org.opensearch.index.cache.request.ShardRequestCache; import org.opensearch.index.query.TermQueryBuilder; -import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.index.shard.IndexShardState; +import org.opensearch.test.OpenSearchSingleNodeTestCase; import java.io.IOException; import java.util.Arrays; -import java.util.concurrent.atomic.AtomicBoolean; +import java.util.Optional; +import java.util.UUID; -public class IndicesRequestCacheTests extends OpenSearchTestCase { +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class IndicesRequestCacheTests extends OpenSearchSingleNodeTestCase { public void testBasicOperationsCache() throws Exception { - ShardRequestCache requestCacheStats = new ShardRequestCache(); - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache( + Settings.EMPTY, + (shardId -> Optional.of(new IndicesService.IndexShardCacheEntity(indexShard))) + ); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -76,13 +89,13 @@ public void testBasicOperationsCache() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - AtomicBoolean indexShard = new AtomicBoolean(true); // initial cache - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + 
IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + ShardRequestCache requestCacheStats = indexShard.requestCache(); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -90,10 +103,11 @@ public void testBasicOperationsCache() throws Exception { assertEquals(1, cache.count()); // cache hit - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = indexShard.requestCache(); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -106,7 +120,7 @@ public void testBasicOperationsCache() throws Exception { if (randomBoolean()) { reader.close(); } else { - indexShard.set(false); // closed shard but reader is still open + indexShard.close("test", true, true); // closed shard but reader is still open cache.clear(entity); } cache.cleanCache(); @@ -122,9 +136,17 @@ public void testBasicOperationsCache() throws Exception { } public void testCacheDifferentReaders() throws Exception { - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); - AtomicBoolean indexShard = new AtomicBoolean(true); - ShardRequestCache requestCacheStats = new ShardRequestCache(); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY, (shardId -> { + IndexService indexService = null; + try { + indexService = indicesService.indexServiceSafe(shardId.getIndex()); + } catch (IndexNotFoundException ex) { + return Optional.empty(); + } + return Optional.of(new IndicesService.IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -141,9 +163,10 @@ public void testCacheDifferentReaders() throws Exception { DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); // initial cache - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); + ShardRequestCache requestCacheStats = entity.stats(); assertEquals("foo", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); @@ -155,9 +178,10 @@ public void testCacheDifferentReaders() throws Exception { assertEquals(1, cache.numRegisteredCloseListeners()); // cache the second - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(secondReader, 0); value = cache.getOrCompute(entity, loader, 
secondReader, termBytes); + requestCacheStats = entity.stats(); assertEquals("bar", value.streamInput().readString()); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); @@ -167,9 +191,10 @@ public void testCacheDifferentReaders() throws Exception { assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > cacheSize + value.length()); assertEquals(2, cache.numRegisteredCloseListeners()); - secondEntity = new TestEntity(requestCacheStats, indexShard); + secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(secondReader, 0); value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes); + requestCacheStats = entity.stats(); assertEquals("bar", value.streamInput().readString()); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); @@ -177,10 +202,11 @@ public void testCacheDifferentReaders() throws Exception { assertTrue(loader.loadedFromCache); assertEquals(2, cache.count()); - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = entity.stats(); assertEquals(2, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -201,7 +227,7 @@ public void testCacheDifferentReaders() throws Exception { if (randomBoolean()) { secondReader.close(); } else { - indexShard.set(false); // closed shard but reader is still open + indexShard.close("test", true, true); // closed shard but reader is still open cache.clear(secondEntity); } cache.cleanCache(); @@ -218,9 +244,11 @@ public void testCacheDifferentReaders() throws Exception { public void testEviction() throws Exception { final ByteSizeValue size; { - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); - AtomicBoolean indexShard = new AtomicBoolean(true); - ShardRequestCache requestCacheStats = new ShardRequestCache(); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache( + Settings.EMPTY, + (shardId -> Optional.of(new IndicesService.IndexShardCacheEntity(indexShard))) + ); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -228,26 +256,26 @@ public void testEviction() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader secondLoader = new Loader(secondReader, 0); BytesReference value1 = 
cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); - size = requestCacheStats.stats().getMemorySize(); + size = indexShard.requestCache().stats().getMemorySize(); IOUtils.close(reader, secondReader, writer, dir, cache); } + IndexShard indexShard = createIndex("test1").getShard(0); IndicesRequestCache cache = new IndicesRequestCache( - Settings.builder().put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), size.getBytes() + 1 + "b").build() + Settings.builder().put(IndicesRequestCache.INDICES_CACHE_QUERY_SIZE.getKey(), size.getBytes() + 1 + "b").build(), + (shardId -> Optional.of(new IndicesService.IndexShardCacheEntity(indexShard))) ); - AtomicBoolean indexShard = new AtomicBoolean(true); - ShardRequestCache requestCacheStats = new ShardRequestCache(); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -255,36 +283,44 @@ public void testEviction() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader secondLoader = new Loader(secondReader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); DirectoryReader thirdReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity thirddEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader thirdLoader = new Loader(thirdReader, 0); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); - logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); + logger.info("Memory size: {}", indexShard.requestCache().stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); assertEquals("baz", value3.streamInput().readString()); assertEquals(2, cache.count()); - assertEquals(1, requestCacheStats.stats().getEvictions()); + assertEquals(1, indexShard.requestCache().stats().getEvictions()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); } public void testClearAllEntityIdentity() throws Exception { - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); - AtomicBoolean indexShard = new AtomicBoolean(true); + IndicesService indicesService = 
getInstanceFromNode(IndicesService.class); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY, (shardId -> { + IndexService indexService = null; + try { + indexService = indicesService.indexServiceSafe(shardId.getIndex()); + } catch (IndexNotFoundException ex) { + return Optional.empty(); + } + return Optional.of(new IndicesService.IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); - ShardRequestCache requestCacheStats = new ShardRequestCache(); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -292,36 +328,39 @@ public void testClearAllEntityIdentity() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "bar")); DirectoryReader secondReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity secondEntity = new IndicesService.IndexShardCacheEntity(indexShard); Loader secondLoader = new Loader(secondReader, 0); writer.updateDocument(new Term("id", "0"), newDoc(0, "baz")); DirectoryReader thirdReader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); - AtomicBoolean differentIdentity = new AtomicBoolean(true); - TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity); + IndicesService.IndexShardCacheEntity thirddEntity = new IndicesService.IndexShardCacheEntity(createIndex("test1").getShard(0)); Loader thirdLoader = new Loader(thirdReader, 0); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value1.streamInput().readString()); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); assertEquals("bar", value2.streamInput().readString()); - logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); + logger.info("Memory size: {}", indexShard.requestCache().stats().getMemorySize()); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); assertEquals("baz", value3.streamInput().readString()); assertEquals(3, cache.count()); - final long hitCount = requestCacheStats.stats().getHitCount(); + RequestCacheStats requestCacheStats = entity.stats().stats(); + requestCacheStats.add(thirddEntity.stats().stats()); + final long hitCount = requestCacheStats.getHitCount(); // clear all for the indexShard identity even though it's still open cache.clear(randomFrom(entity, secondEntity)); cache.cleanCache(); assertEquals(1, cache.count()); // third has not been validated since it's a different identity value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); - assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount()); + requestCacheStats = entity.stats().stats(); + requestCacheStats.add(thirddEntity.stats().stats()); + assertEquals(hitCount + 1, 
requestCacheStats.getHitCount()); assertEquals("baz", value3.streamInput().readString()); IOUtils.close(reader, secondReader, thirdReader, writer, dir, cache); @@ -365,8 +404,17 @@ public BytesReference get() { } public void testInvalidate() throws Exception { - ShardRequestCache requestCacheStats = new ShardRequestCache(); - IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexShard indexShard = createIndex("test").getShard(0); + IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY, (shardId -> { + IndexService indexService = null; + try { + indexService = indicesService.indexServiceSafe(shardId.getIndex()); + } catch (IndexNotFoundException ex) { + return Optional.empty(); + } + return Optional.of(new IndicesService.IndexShardCacheEntity(indexService.getShard(shardId.id()))); + })); Directory dir = newDirectory(); IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig()); @@ -374,13 +422,13 @@ public void testInvalidate() throws Exception { DirectoryReader reader = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1)); TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); - AtomicBoolean indexShard = new AtomicBoolean(true); // initial cache - TestEntity entity = new TestEntity(requestCacheStats, indexShard); + IndicesService.IndexShardCacheEntity entity = new IndicesService.IndexShardCacheEntity(indexShard); Loader loader = new Loader(reader, 0); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + ShardRequestCache requestCacheStats = entity.stats(); assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -388,10 +436,11 @@ public void testInvalidate() throws Exception { assertEquals(1, cache.count()); // cache hit - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = entity.stats(); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -401,11 +450,12 @@ public void testInvalidate() throws Exception { assertEquals(1, cache.numRegisteredCloseListeners()); // load again after invalidate - entity = new TestEntity(requestCacheStats, indexShard); + entity = new IndicesService.IndexShardCacheEntity(indexShard); loader = new Loader(reader, 0); cache.invalidate(entity, reader, termBytes); value = cache.getOrCompute(entity, loader, reader, termBytes); assertEquals("foo", value.streamInput().readString()); + requestCacheStats = entity.stats(); assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(0, requestCacheStats.stats().getEvictions()); @@ -418,7 +468,7 @@ public void testInvalidate() throws Exception { if (randomBoolean()) { reader.close(); } else { - indexShard.set(false); // closed shard but reader is still open + indexShard.close("test", true, true); // closed shard but reader is still open 
cache.clear(entity); } cache.cleanCache(); @@ -433,22 +483,25 @@ public void testInvalidate() throws Exception { } public void testEqualsKey() throws IOException { - AtomicBoolean trueBoolean = new AtomicBoolean(true); - AtomicBoolean falseBoolean = new AtomicBoolean(false); + IndicesService indicesService = getInstanceFromNode(IndicesService.class); Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig(); IndexWriter writer = new IndexWriter(dir, config); - IndexReader reader1 = DirectoryReader.open(writer); - IndexReader.CacheKey rKey1 = reader1.getReaderCacheHelper().getKey(); + ShardId shardId = new ShardId("foo", "bar", 1); + ShardId shardId1 = new ShardId("foo1", "bar1", 2); + IndexReader reader1 = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), shardId); + String rKey1 = ((OpenSearchDirectoryReader) reader1).getDelegatingCacheHelper().getDelegatingCacheKey().getId(); writer.addDocument(new Document()); - IndexReader reader2 = DirectoryReader.open(writer); - IndexReader.CacheKey rKey2 = reader2.getReaderCacheHelper().getKey(); + IndexReader reader2 = OpenSearchDirectoryReader.wrap(DirectoryReader.open(writer), shardId); + String rKey2 = ((OpenSearchDirectoryReader) reader2).getDelegatingCacheHelper().getDelegatingCacheKey().getId(); IOUtils.close(reader1, reader2, writer, dir); - IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(new TestEntity(null, falseBoolean), rKey1, new TestBytesReference(1)); - IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey2, new TestBytesReference(1)); - IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(2)); + IndexShard indexShard = mock(IndexShard.class); + when(indexShard.state()).thenReturn(IndexShardState.STARTED); + IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(shardId, new TestBytesReference(1), rKey1); + IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(shardId, new TestBytesReference(1), rKey1); + IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(shardId1, new TestBytesReference(1), rKey1); + IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(shardId, new TestBytesReference(1), rKey2); + IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(shardId, new TestBytesReference(2), rKey2); String s = "Some other random object"; assertEquals(key1, key1); assertEquals(key1, key2); @@ -459,6 +512,29 @@ public void testEqualsKey() throws IOException { assertNotEquals(key1, key5); } + public void testSerializationDeserializationOfCacheKey() throws Exception { + TermQueryBuilder termQuery = new TermQueryBuilder("id", "0"); + BytesReference termBytes = XContentHelper.toXContent(termQuery, MediaTypeRegistry.JSON, false); + IndexService indexService = createIndex("test"); + IndexShard indexShard = indexService.getShard(0); + IndicesService.IndexShardCacheEntity shardCacheEntity = new IndicesService.IndexShardCacheEntity(indexShard); + String readerCacheKeyId = UUID.randomUUID().toString(); + IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(indexShard.shardId(), termBytes, readerCacheKeyId); + BytesReference bytesReference = null; + try (BytesStreamOutput out = new BytesStreamOutput()) 
{ + key1.writeTo(out); + bytesReference = out.bytes(); + } + StreamInput in = bytesReference.streamInput(); + + IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(in); + + assertEquals(readerCacheKeyId, key2.readerCacheKeyId); + assertEquals(((IndexShard) shardCacheEntity.getCacheIdentity()).shardId(), key2.shardId); + assertEquals(termBytes, key2.value); + + } + private class TestBytesReference extends AbstractBytesReference { int dummyValue; @@ -509,34 +585,4 @@ public boolean isFragment() { return false; } } - - private class TestEntity extends AbstractIndexShardCacheEntity { - private final AtomicBoolean standInForIndexShard; - private final ShardRequestCache shardRequestCache; - - private TestEntity(ShardRequestCache shardRequestCache, AtomicBoolean standInForIndexShard) { - this.standInForIndexShard = standInForIndexShard; - this.shardRequestCache = shardRequestCache; - } - - @Override - protected ShardRequestCache stats() { - return shardRequestCache; - } - - @Override - public boolean isOpen() { - return standInForIndexShard.get(); - } - - @Override - public Object getCacheIdentity() { - return standInForIndexShard; - } - - @Override - public long ramBytesUsed() { - return 42; - } - } } diff --git a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java index add1dfc348a8b..8a00cd2db21c9 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesServiceCloseTests.java @@ -44,18 +44,15 @@ import org.apache.lucene.search.Weight; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.routing.allocation.DiskThresholdSettings; -import org.opensearch.common.cache.RemovalNotification; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.env.Environment; import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexService; import org.opensearch.index.engine.Engine; import org.opensearch.index.shard.IndexShard; -import org.opensearch.indices.IndicesRequestCache.Key; import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.node.MockNode; import org.opensearch.node.Node; @@ -373,15 +370,12 @@ public void testCloseWhileOngoingRequestUsesRequestCache() throws Exception { assertEquals(1, indicesService.indicesRefCount.refCount()); assertEquals(0L, cache.count()); - IndicesRequestCache.CacheEntity cacheEntity = new IndicesRequestCache.CacheEntity() { + IndicesService.IndexShardCacheEntity cacheEntity = new IndicesService.IndexShardCacheEntity(shard) { @Override public long ramBytesUsed() { return 42; } - @Override - public void onCached(Key key, BytesReference value) {} - @Override public boolean isOpen() { return true; @@ -389,17 +383,8 @@ public boolean isOpen() { @Override public Object getCacheIdentity() { - return this; + return shard; } - - @Override - public void onHit() {} - - @Override - public void onMiss() {} - - @Override - public void onRemoval(RemovalNotification<Key, BytesReference> notification) {} }; cache.getOrCompute(cacheEntity, () -> new BytesArray("bar"), searcher.getDirectoryReader(), new BytesArray("foo")); assertEquals(1L, cache.count()); From bbe790bd9cbfe44035f63f22d196a694be5ff3ab Mon Sep 
17 00:00:00 2001 From: Neetika Singhal <neetiks@amazon.com> Date: Wed, 10 Jan 2024 20:53:07 -0800 Subject: [PATCH 080/178] Fix SimpleNestedIT.testExplain flaky test (#11681) Signed-off-by: Neetika Singhal <neetiks@amazon.com> --- .../search/nested/SimpleNestedExplainIT.java | 121 ++++++++++++++++++ .../search/nested/SimpleNestedIT.java | 9 +- 2 files changed, 128 insertions(+), 2 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java new file mode 100644 index 0000000000000..71f82d7c0b412 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java @@ -0,0 +1,121 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.nested; + +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.join.ScoreMode; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.test.OpenSearchIntegTestCase; + +import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.query.QueryBuilders.nestedQuery; +import static org.opensearch.index.query.QueryBuilders.termQuery; +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.equalTo; + +/** + * Creating a separate class with no parameterization to create and index documents in a single + * test run and compare search responses across concurrent and non-concurrent search. For more details, + * refer: https://github.com/opensearch-project/OpenSearch/issues/11413 + */ +public class SimpleNestedExplainIT extends OpenSearchIntegTestCase { + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + /* + * Tests the explain output for multiple docs. Concurrent search with multiple slices is tested + * here as call to indexRandomForMultipleSlices is made and compared with explain output for + * non-concurrent search use-case. 
Separate test class is created to test explain for 1 slice + * case in concurrent search, refer {@link SimpleNestedIT#testExplainWithSingleDoc} + * For more details, refer: https://github.com/opensearch-project/OpenSearch/issues/11413 + * */ + public void testExplainMultipleDocs() throws Exception { + assertAcked( + prepareCreate("test").setMapping( + jsonBuilder().startObject() + .startObject("properties") + .startObject("nested1") + .field("type", "nested") + .endObject() + .endObject() + .endObject() + ) + ); + + ensureGreen(); + + client().prepareIndex("test") + .setId("1") + .setSource( + jsonBuilder().startObject() + .field("field1", "value1") + .startArray("nested1") + .startObject() + .field("n_field1", "n_value1") + .endObject() + .startObject() + .field("n_field1", "n_value1") + .endObject() + .endArray() + .endObject() + ) + .setRefreshPolicy(IMMEDIATE) + .get(); + + indexRandomForMultipleSlices("test"); + + // Turn off the concurrent search setting to test search with non-concurrent search + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build()) + .get(); + + SearchResponse nonConSearchResp = client().prepareSearch("test") + .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total)) + .setExplain(true) + .get(); + assertNoFailures(nonConSearchResp); + assertThat(nonConSearchResp.getHits().getTotalHits().value, equalTo(1L)); + Explanation nonConSearchExplain = nonConSearchResp.getHits().getHits()[0].getExplanation(); + assertThat(nonConSearchExplain.getValue(), equalTo(nonConSearchResp.getHits().getHits()[0].getScore())); + + // Turn on the concurrent search setting to test search with concurrent search + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build()) + .get(); + + SearchResponse conSearchResp = client().prepareSearch("test") + .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total)) + .setExplain(true) + .get(); + assertNoFailures(conSearchResp); + assertThat(conSearchResp.getHits().getTotalHits().value, equalTo(1L)); + Explanation conSearchExplain = conSearchResp.getHits().getHits()[0].getExplanation(); + assertThat(conSearchExplain.getValue(), equalTo(conSearchResp.getHits().getHits()[0].getScore())); + + // assert that the explanation for concurrent search is equal to the non-concurrent search's explanation + assertEquals(nonConSearchExplain, conSearchExplain); + client().admin() + .cluster() + .prepareUpdateSettings() + .setPersistentSettings(Settings.builder().putNull(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()).build()) + .get(); + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index 6d0b074c3a660..8eeffcbecb377 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -455,7 +455,13 @@ public void testDeleteNestedDocsWithAlias() throws Exception { assertDocumentCount("test", 6); } - public void testExplain() throws Exception { + /* + * Tests the explain output for single doc. 
Concurrent search with only slice 1 is tested + * here as call to indexRandomForMultipleSlices has implications on the range of child docs + * in the explain output. Separate test class is created to test explain for multiple slices + * case in concurrent search, refer {@link SimpleNestedExplainIT} + * */ + public void testExplainWithSingleDoc() throws Exception { assertAcked( prepareCreate("test").setMapping( jsonBuilder().startObject() @@ -487,7 +493,6 @@ public void testExplain() throws Exception { ) .setRefreshPolicy(IMMEDIATE) .get(); - indexRandomForConcurrentSearch("test"); SearchResponse searchResponse = client().prepareSearch("test") .setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1"), ScoreMode.Total)) From b0426881ed6e0271ef9110b00e3d44ad83116935 Mon Sep 17 00:00:00 2001 From: Jay Deng <jayd0104@gmail.com> Date: Thu, 11 Jan 2024 05:26:15 -0800 Subject: [PATCH 081/178] Correctly calculate doc_count_error at the slice level for concurrent segment search. Change slice_size heuristic to be equal to shard_size. (#11732) Signed-off-by: Jay Deng <jayd0104@gmail.com> --- CHANGELOG.md | 1 + .../aggregations/bucket/ShardSizeTermsIT.java | 18 +- .../bucket/TermsDocCountErrorIT.java | 10 +- .../bucket/TermsFixedDocCountErrorIT.java | 347 ++++++++++++++++++ .../bucket/TermsShardMinDocCountIT.java | 16 +- .../bucket/terms/InternalTerms.java | 21 +- .../search/internal/SearchContext.java | 11 +- 7 files changed, 403 insertions(+), 21 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 0b256473dec6f..b638b63dd52a0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -192,6 +192,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) - Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) - Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582)) +- Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732)) ### Deprecated diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java index 145830f02ee56..7c7cc12888307 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java @@ -86,6 +86,7 @@ public void testShardSizeEqualsSizeString() throws Exception { terms("keys").field("key") .size(3) .shardSize(3) + .showTermDocCountError(true) .collectMode(randomFrom(SubAggCollectionMode.values())) .order(BucketOrder.count(false)) ) @@ -98,8 +99,11 @@ public void testShardSizeEqualsSizeString() throws Exception { expected.put("1", 8L); expected.put("3", 8L); expected.put("2", 4L); + Long expectedDocCount; for (Terms.Bucket bucket : buckets) { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsString()))); + expectedDocCount = expected.get(bucket.getKeyAsString()); + // Doc count can vary when using concurrent 
segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680 + assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount); } } @@ -221,6 +225,7 @@ public void testShardSizeEqualsSizeLong() throws Exception { terms("keys").field("key") .size(3) .shardSize(3) + .showTermDocCountError(true) .collectMode(randomFrom(SubAggCollectionMode.values())) .order(BucketOrder.count(false)) ) @@ -233,8 +238,11 @@ public void testShardSizeEqualsSizeLong() throws Exception { expected.put(1, 8L); expected.put(3, 8L); expected.put(2, 4L); + Long expectedDocCount; for (Terms.Bucket bucket : buckets) { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); + expectedDocCount = expected.get(bucket.getKeyAsNumber().intValue()); + // Doc count can vary when using concurrent segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680 + assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount); } } @@ -355,6 +363,7 @@ public void testShardSizeEqualsSizeDouble() throws Exception { terms("keys").field("key") .size(3) .shardSize(3) + .showTermDocCountError(true) .collectMode(randomFrom(SubAggCollectionMode.values())) .order(BucketOrder.count(false)) ) @@ -367,8 +376,11 @@ public void testShardSizeEqualsSizeDouble() throws Exception { expected.put(1, 8L); expected.put(3, 8L); expected.put(2, 4L); + Long expectedDocCount; for (Terms.Bucket bucket : buckets) { - assertThat(bucket.getDocCount(), equalTo(expected.get(bucket.getKeyAsNumber().intValue()))); + expectedDocCount = expected.get(bucket.getKeyAsNumber().intValue()); + // Doc count can vary when using concurrent segment search. See https://github.com/opensearch-project/OpenSearch/issues/11680 + assertTrue((bucket.getDocCount() == expectedDocCount) || bucket.getDocCount() + bucket.getDocCountError() >= expectedDocCount); } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index b355ce6d7a8dd..343cea4b94c87 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -225,8 +225,16 @@ public void setupSuiteScopeCluster() throws Exception { } indexRandom(true, builders); - indexRandomForMultipleSlices("idx"); ensureSearchable(); + + // Force merge each shard down to 1 segment to verify results are the same between concurrent and non-concurrent search paths, else + // for concurrent segment search there will be additional error introduced during the slice level reduce and thus different buckets, + // doc_counts, and doc_count_errors may be returned. This test serves to verify that the doc_count_error is the same between + // concurrent and non-concurrent search in the 1 slice case. TermsFixedDocCountErrorIT verifies that the doc count error is + // correctly calculated for concurrent segment search at the slice level. 
+ // See https://github.com/opensearch-project/OpenSearch/issues/11680 + forceMerge(1); + Thread.sleep(5000); // Sleep 5s to ensure force merge completes } private void assertDocCountErrorWithinBounds(int size, SearchResponse accurateResponse, SearchResponse testResponse) { 
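Before the new test file, a summary of the arithmetic its comments walk through: under count-descending order, a bucket's doc_count_error is bounded by the sum, over each shard (or, with concurrent segment search, each slice) that did not return the term, of that source's smallest returned doc count. A hedged sketch of that bound follows; it is a simplified model of the reduce with hypothetical names, not the actual InternalTerms code:

import java.util.List;
import java.util.Map;

// Illustrative only: estimates the doc_count_error upper bound for one term
// from each source's returned top-`shard_size` buckets (count-descending).
final class DocCountErrorSketch {
    static long upperBound(String term, List<Map<String, Long>> perSourceTopBuckets) {
        long error = 0;
        for (Map<String, Long> top : perSourceTopBuckets) {
            if (!top.containsKey(term)) {
                // A source that omitted the term may still hold up to as many
                // matching docs as its smallest returned bucket.
                error += top.values().stream().mapToLong(Long::longValue).min().orElse(0L);
            }
        }
        return error;
    }
}

Applied to the multi-shard example in the comments below (shard tops {A: 4, B: 2} and {B: 3, C: 2}), this gives an error of 2 for bucket A and 0 for bucket B, matching the assertions in testSimpleAggErrorMultiShard.

diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java new file mode 100644 index 0000000000000..5ad913e8c7086 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java @@ -0,0 +1,347 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.indices.segments.IndicesSegmentResponse; +import org.opensearch.action.search.SearchResponse; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; +import org.opensearch.search.aggregations.bucket.terms.Terms; +import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; + +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.opensearch.search.aggregations.AggregationBuilders.terms; +import static org.opensearch.test.OpenSearchIntegTestCase.Scope.TEST; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +@OpenSearchIntegTestCase.ClusterScope(scope = TEST, numClientNodes = 0, maxNumDataNodes = 1, supportsDedicatedMasters = false) +public class TermsFixedDocCountErrorIT extends ParameterizedOpenSearchIntegTestCase { + + private static final String STRING_FIELD_NAME = "s_value"; + + public TermsFixedDocCountErrorIT(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSimpleAggErrorMultiShard() throws Exception { + // size = 1, shard_size = 2 + // Shard_1 [A, A, A, A, B, B, C] -> Buckets {"A" : 4, "B" : 2} + // Shard_2 [A, B, B, B, C, C] -> Buckets {"B" : 3, "C" : 2} + // coordinator -> Buckets {"B" : 5, "A" : 4} + // Agg error is 4, from (shard_size)th bucket on each shard + // Bucket "A" error is 2, from (shard_size)th bucket on shard_2 + // Bucket "B" error is 0, it's present on both shards + + // size = 1 shard_size = 1 slice_size = 1 + // non-cs / cs + // Shard_1 [A, B, C] + // Shard_2 [B, C, D] + // cs + // Shard_1 slice_1 [A, B, C] -> {a : 1} -> {a : 1 -- error: 1} + // slice_2 [B, C, D] -> {b : 1} + 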
// Coordinator should return the same doc count error in both cases + + assertAcked( + prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_1"); + + IndicesSegmentResponse segmentResponse = client().admin().indices().prepareSegments("idx_mshard_1").get(); + assertEquals(1, segmentResponse.getIndices().get("idx_mshard_1").getShards().get(0).getShards()[0].getSegments().size()); + + assertAcked( + prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_2"); + + segmentResponse = client().admin().indices().prepareSegments("idx_mshard_2").get(); + assertEquals(1, segmentResponse.getIndices().get("idx_mshard_2").getShards().get(0).getShards()[0].getSegments().size()); + + SearchResponse response = client().prepareSearch("idx_mshard_2", "idx_mshard_1") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(4, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); // Bucket "B" + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); // Bucket "A" + assertEquals("A", bucket.getKey().toString()); + assertEquals(4, bucket.getDocCount()); + assertEquals(2, bucket.getDocCountError()); + } + + public void testSimpleAggErrorSingleShard() throws Exception { + assertAcked( + 
prepareCreate("idx_shard_error").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + refresh("idx_shard_error"); + + SearchResponse response = client().prepareSearch("idx_shard_error") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(1).shardSize(2)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(0, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("A", bucket.getKey().toString()); + assertEquals(6, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + } + + public void testSliceLevelDocCountErrorSingleShard() throws Exception { + assumeTrue( + "Slice level error is not relevant to non-concurrent search cases", + internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); + + // Slices are created by sorting segments by doc count in descending order then distributing in round robin fashion. + // Creates 2 segments (and therefore 2 slices since slice_count = 2) as follows: + // 1. [A, A, A, B, B, C] + // 2. 
[A, B, B, B, C, C] + // Thus we expect the doc count error for A to be 2 as the nth largest bucket on slice 2 has size 2 + + assertAcked( + prepareCreate("idx_slice_error").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_slice_error"); + + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_slice_error"); + + IndicesSegmentResponse segmentResponse = client().admin().indices().prepareSegments("idx_slice_error").get(); + assertEquals(2, segmentResponse.getIndices().get("idx_slice_error").getShards().get(0).getShards()[0].getSegments().size()); + + // Confirm that there is no error when shard_size == slice_size > cardinality + SearchResponse response = client().prepareSearch("idx_slice_error") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(1).shardSize(4)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(1, terms.getBuckets().size()); + assertEquals(0, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); // Bucket "B" + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + response = client().prepareSearch("idx_slice_error") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(4, terms.getDocCountError()); + + bucket = terms.getBuckets().get(0); // Bucket "B" + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); // Bucket "A" + assertEquals("A", bucket.getKey().toString()); + assertEquals(3, bucket.getDocCount()); + 
assertEquals(2, bucket.getDocCountError()); + } + + public void testSliceLevelDocCountErrorMultiShard() throws Exception { + assumeTrue( + "Slice level error is not relevant to non-concurrent search cases", + internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); + + // Size = 2, shard_size = 2 + // Shard_1 [A, A, A, A, B, B, C, C] + // slice_1 [A, A, A, B, B, C] {"A" : 3, "B" : 2} + // slice_2 [A, C] {"A" : 1, "C" : 1} + // Shard_1 buckets: {"A" : 4 - error: 0, "B" : 2 - error: 1} + // Shard_2 [A, A, B, B, B, C, C, C] + // slice_1 [A, B, B, B, C, C] {"B" : 3, "C" : 2} + // slice_2 [A, C] {"A" : 1, "C" : 1} + // Shard_2 buckets: {"B" : 3 - error: 1, "C" : 3 - error: 0} + // Overall + // {"B" : 5 - error: 2, "A" : 4 - error: 3} Agg error: 6 + + assertAcked( + prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_1"); + + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_1"); + + IndicesSegmentResponse segmentResponse = client().admin().indices().prepareSegments("idx_mshard_1").get(); + assertEquals(2, segmentResponse.getIndices().get("idx_mshard_1").getShards().get(0).getShards()[0].getSegments().size()); + + SearchResponse response = client().prepareSearch("idx_mshard_1") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + Terms terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(3, terms.getDocCountError()); + + Terms.Bucket bucket = terms.getBuckets().get(0); + assertEquals("A", bucket.getKey().toString()); + assertEquals(4, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); + assertEquals("B", bucket.getKey().toString()); + assertEquals(2, bucket.getDocCount()); + assertEquals(1, bucket.getDocCountError()); + + assertAcked( + prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") + .setSettings( + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + ) + ); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, 
"B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "B").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_2"); + + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); + client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "C").endObject()).get(); + refresh("idx_mshard_2"); + + segmentResponse = client().admin().indices().prepareSegments("idx_mshard_2").get(); + assertEquals(2, segmentResponse.getIndices().get("idx_mshard_2").getShards().get(0).getShards()[0].getSegments().size()); + + response = client().prepareSearch("idx_mshard_2") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(3, terms.getDocCountError()); + + bucket = terms.getBuckets().get(0); + assertEquals("B", bucket.getKey().toString()); + assertEquals(3, bucket.getDocCount()); + assertEquals(1, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); + assertEquals("C", bucket.getKey().toString()); + assertEquals(3, bucket.getDocCount()); + assertEquals(0, bucket.getDocCountError()); + + response = client().prepareSearch("idx_mshard_2", "idx_mshard_1") + .setSize(0) + .addAggregation(terms("terms").field(STRING_FIELD_NAME).showTermDocCountError(true).size(2).shardSize(2)) + .get(); + + terms = response.getAggregations().get("terms"); + assertEquals(2, terms.getBuckets().size()); + assertEquals(6, terms.getDocCountError()); + + bucket = terms.getBuckets().get(0); + assertEquals("B", bucket.getKey().toString()); + assertEquals(5, bucket.getDocCount()); + assertEquals(2, bucket.getDocCountError()); + + bucket = terms.getBuckets().get(1); + assertEquals("A", bucket.getKey().toString()); + assertEquals(4, bucket.getDocCount()); + assertEquals(3, bucket.getDocCountError()); + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index b0d8e7ea02e8f..3851b16551795 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -88,6 +88,10 @@ private static String randomExecutionHint() { // see https://github.com/elastic/elasticsearch/issues/5998 public void testShardMinDocCountSignificantTermsTest() throws Exception { + assumeFalse( + "For concurrent segment search shard_min_doc_count is not enforced at the slice level. 
See https://github.com/opensearch-project/OpenSearch/issues/11847", + internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); String textMappings; if (randomBoolean()) { textMappings = "type=long"; @@ -157,6 +161,10 @@ private void addTermsDocs(String term, int numInClass, int numNotInClass, List<I // see https://github.com/elastic/elasticsearch/issues/5998 public void testShardMinDocCountTermsTest() throws Exception { + assumeFalse( + "For concurrent segment search shard_min_doc_count is not enforced at the slice level. See https://github.com/opensearch-project/OpenSearch/issues/11847", + internalCluster().clusterService().getClusterSettings().get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING) + ); final String[] termTypes = { "text", "long", "integer", "float", "double" }; String termtype = termTypes[randomInt(termTypes.length - 1)]; String termMappings = "type=" + termtype; @@ -189,8 +197,8 @@ public void testShardMinDocCountTermsTest() throws Exception { ) .get(); assertSearchResponse(response); - Terms sigterms = response.getAggregations().get("myTerms"); - assertThat(sigterms.getBuckets().size(), equalTo(0)); + Terms terms = response.getAggregations().get("myTerms"); + assertThat(terms.getBuckets().size(), equalTo(0)); response = client().prepareSearch(index) .addAggregation( @@ -204,8 +212,8 @@ public void testShardMinDocCountTermsTest() throws Exception { ) .get(); assertSearchResponse(response); - sigterms = response.getAggregations().get("myTerms"); - assertThat(sigterms.getBuckets().size(), equalTo(2)); + terms = response.getAggregations().get("myTerms"); + assertThat(terms.getBuckets().size(), equalTo(2)); } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java index 0e773291881cf..b8f9406ff55b9 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java @@ -225,6 +225,7 @@ public int hashCode() { protected final int requiredSize; protected final long minDocCount; protected final TermsAggregator.BucketCountThresholds bucketCountThresholds; + private boolean hasSliceLevelDocCountError = false; /** * Creates a new {@link InternalTerms} @@ -299,9 +300,7 @@ private BucketOrder getReduceOrder(List<InternalAggregation> aggregations) { private long getDocCountError(InternalTerms<?, ?> terms, ReduceContext reduceContext) { int size = terms.getBuckets().size(); - // doc_count_error is always computed at the coordinator based on the buckets returned by the shards. This should be 0 during the - // shard level reduce as no buckets are being pruned at this stage. - if (reduceContext.isSliceLevel() || size == 0 || size < terms.getShardSize() || isKeyOrder(terms.order)) { + if (size == 0 || size < terms.getShardSize() || isKeyOrder(terms.order)) { return 0; } else if (InternalOrder.isCountDesc(terms.order)) { if (terms.getDocCountError() > 0) { @@ -398,6 +397,12 @@ public InternalAggregation reduce(List<InternalAggregation> aggregations, Reduce for (InternalAggregation aggregation : aggregations) { @SuppressWarnings("unchecked") InternalTerms<A, B> terms = (InternalTerms<A, B>) aggregation; + // For Concurrent Segment Search the aggregation will have a computed doc count error coming from the shards. 
+ // We use the existence of this doc count error to determine whether the doc count error originated at the slice level, + // and if so we maintain it for the single-shard case at the coordinator level + if (aggregations.size() == 1 && terms.getDocCountError() > 0) { + hasSliceLevelDocCountError = true; + } if (referenceTerms == null && aggregation.getClass().equals(UnmappedTerms.class) == false) { referenceTerms = terms; } @@ -500,7 +505,11 @@ For backward compatibility, we disable the merge sort and use ({@link InternalTe if (sumDocCountError == -1) { docCountError = -1; } else { - docCountError = aggregations.size() == 1 ? 0 : sumDocCountError; + if (hasSliceLevelDocCountError) { + docCountError = sumDocCountError; + } else { + docCountError = aggregations.size() == 1 ? 0 : sumDocCountError; + } } // Shards must return buckets sorted by key, so we apply the sort here in shard level reduce @@ -512,7 +521,7 @@ For backward compatibility, we disable the merge sort and use ({@link InternalTe @Override protected B reduceBucket(List<B> buckets, ReduceContext context) { - assert buckets.size() > 0; + assert !buckets.isEmpty(); long docCount = 0; // For the per term doc count error we add up the errors from the // shards that did not respond with the term. To do this we add up @@ -523,7 +532,7 @@ protected B reduceBucket(List<B> buckets, ReduceContext context) { for (B bucket : buckets) { docCount += bucket.getDocCount(); if (docCountError != -1) { - if (bucket.showDocCountError() == false || bucket.getDocCountError() == -1) { + if (bucket.showDocCountError() == false) { docCountError = -1; } else { docCountError += bucket.getDocCountError(); diff --git a/server/src/main/java/org/opensearch/search/internal/SearchContext.java b/server/src/main/java/org/opensearch/search/internal/SearchContext.java index cc43f4e5d79fb..02837da64dafd 100644 --- a/server/src/main/java/org/opensearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/SearchContext.java @@ -86,8 +86,6 @@ import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; -import static org.opensearch.search.aggregations.bucket.BucketUtils.suggestShardSideQueueSize; - /** * This class encapsulates the state needed to execute a search. It holds a reference to the * shards point in time snapshot (IndexReader / ContextIndexSearcher) and allows passing on @@ -410,11 +408,10 @@ public boolean shouldUseConcurrentSearch() { * Returns local bucket count thresholds based on concurrent segment search status */ public LocalBucketCountThresholds asLocalBucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) { - if (shouldUseConcurrentSearch()) { - return new LocalBucketCountThresholds(0, suggestShardSideQueueSize(bucketCountThresholds.getShardSize())); - } else { - return new LocalBucketCountThresholds(bucketCountThresholds.getShardMinDocCount(), bucketCountThresholds.getShardSize()); - } + return new LocalBucketCountThresholds( + shouldUseConcurrentSearch() ?
0 : bucketCountThresholds.getShardMinDocCount(), + bucketCountThresholds.getShardSize() + ); } /** From 3d577145cc1c3d2ec916ca3ff5a3472c98132f2b Mon Sep 17 00:00:00 2001 From: Marc Handalian <handalm@amazon.com> Date: Thu, 11 Jan 2024 06:41:55 -0800 Subject: [PATCH 082/178] Update runTask to optionally install plugins (#11844) Signed-off-by: Marc Handalian <marc.handalian@gmail.com> --- DEVELOPER_GUIDE.md | 8 +++++++- gradle/run.gradle | 6 ++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index f9936aad0cf8c..21adbb0305ab1 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -183,6 +183,12 @@ Run OpenSearch using `gradlew run`. ./gradlew run ``` +[Plugins](plugins/) may be installed by passing a `-PinstalledPlugins` property: + +```bash +./gradlew run -PinstalledPlugins="['plugin1', 'plugin2']" +``` + That will build OpenSearch and start it, writing its log above Gradle's status message. We log a lot of stuff on startup, specifically these lines tell you that OpenSearch is ready. ``` @@ -578,7 +584,7 @@ explicitly marked by an annotation should not be extended by external implementa any time. The `@DeprecatedApi` annotation could also be added to any classes annotated with `@PublicApi` (or documented as `@opensearch.api`) or their methods that are either changed (with replacement) or planned to be removed across major versions. -The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`) +The APIs which are designated to be public but have not been stabilized yet should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`) annotation. The presence of this annotation signals that API may change at any time (major, minor or even patch releases). In general, the classes annotated with `@PublicApi` may expose other classes or methods annotated with `@ExperimentalApi`, in such cases the backward compatibility guarantees would not apply to latter (see please [Experimental Development](#experimental-development) for more details). 
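For illustration, a minimal sketch of how these annotations might be combined. The class and method names below are hypothetical, and the exact annotation attributes (such as `since`) are assumptions rather than confirmed signatures:

```java
import org.opensearch.common.annotation.DeprecatedApi;
import org.opensearch.common.annotation.ExperimentalApi;
import org.opensearch.common.annotation.PublicApi;

// Stable public API: backward compatibility guarantees apply across minor releases.
@PublicApi(since = "2.0.0")
public class ExampleComponent {

    // Experimental member exposed from a @PublicApi class: it may change in any
    // release, so the class-level compatibility guarantee does not extend to it.
    @ExperimentalApi
    public void experimentalOperation() {
        // placeholder body for the sketch
    }

    // Deprecated with a replacement available; planned for removal in a future major version.
    @DeprecatedApi(since = "2.4.0")
    public void legacyOperation() {
        experimentalOperation();
    }
}
```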
diff --git a/gradle/run.gradle b/gradle/run.gradle index 639479e97d28f..34651f1d94964 100644 --- a/gradle/run.gradle +++ b/gradle/run.gradle @@ -39,6 +39,12 @@ testClusters { testDistribution = 'archive' if (numZones > 1) numberOfZones = numZones if (numNodes > 1) numberOfNodes = numNodes + if (findProperty("installedPlugins")) { + installedPlugins = Eval.me(installedPlugins) + for (String p : installedPlugins) { + plugin('plugins:'.concat(p)) + } + } } } From 3cf1ce68e6dff44033366aee2e8b77597a07d8ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 11 Jan 2024 11:22:59 -0500 Subject: [PATCH 083/178] Bump com.diffplug.spotless from 6.20.0 to 6.23.2 (#11797) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index b1cd1d532bfeb..296c30391af09 100644 --- a/build.gradle +++ b/build.gradle @@ -54,7 +54,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.20.0" apply false + id "com.diffplug.spotless" version "6.23.2" apply false id "org.gradle.test-retry" version "1.5.4" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' From 6aab36052055e24c2ef56454da9d3a1e4982ee6e Mon Sep 17 00:00:00 2001 From: Chenyang Ji <chenyang.yale@gmail.com> Date: Thu, 11 Jan 2024 12:45:43 -0800 Subject: [PATCH 084/178] Support dynamically adding SearchRequestOperationsListener (#11526) Along the way, also refactored TransportSearchAction.TimeProvider, so that it's no longer a (redundant) listener. --------- Signed-off-by: Chenyang Ji <cyji@amazon.com> --- CHANGELOG.md | 1 + .../search/SearchWeightedRoutingIT.java | 2 +- .../search/stats/SearchStatsIT.java | 2 +- .../search/AbstractSearchAsyncAction.java | 4 +- .../action/search/SearchRequestContext.java | 20 +-- ...estOperationsCompositeListenerFactory.java | 81 +++++++++++ .../SearchRequestOperationsListener.java | 27 +++- .../action/search/SearchRequestSlowLog.java | 18 ++- .../action/search/SearchRequestStats.java | 14 +- .../action/search/SearchResponseMerger.java | 6 +- .../action/search/TransportSearchAction.java | 135 ++++-------------- .../common/settings/ClusterSettings.java | 5 +- .../main/java/org/opensearch/node/Node.java | 16 ++- .../cluster/node/stats/NodeStatsTests.java | 9 +- .../AbstractSearchAsyncActionTests.java | 24 +++- .../CanMatchPreFilterSearchPhaseTests.java | 31 +++- .../action/search/SearchAsyncActionTests.java | 12 +- .../SearchQueryThenFetchAsyncActionTests.java | 7 +- ...erationsCompositeListenerFactoryTests.java | 131 +++++++++++++++++ ...earchRequestOperationsListenerSupport.java | 12 +- .../search/SearchRequestSlowLogTests.java | 24 +++- .../search/SearchRequestStatsTests.java | 35 ++++- .../search/SearchResponseMergerTests.java | 97 +++++++++++-- .../search/SearchTimeProviderTests.java | 54 ------- .../search/TransportSearchActionTests.java | 37 ++++- .../index/search/stats/SearchStatsTests.java | 5 +- .../indices/NodeIndicesStatsTests.java | 5 +- .../snapshots/SnapshotResiliencyTests.java | 9 +- 28 files changed, 577 insertions(+), 246 deletions(-) create mode 100644 server/src/main/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactory.java create mode 100644 
server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java delete mode 100644 server/src/test/java/org/opensearch/action/search/SearchTimeProviderTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index b638b63dd52a0..1f64dcf82bd4d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -193,6 +193,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) - Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582)) - Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732)) +- Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526)) ### Deprecated diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java index aa1fe695ecc12..d1e66c19c28e2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWeightedRoutingIT.java @@ -57,7 +57,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.opensearch.action.search.TransportSearchAction.SEARCH_REQUEST_STATS_ENABLED_KEY; +import static org.opensearch.action.search.SearchRequestStats.SEARCH_REQUEST_STATS_ENABLED_KEY; import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java index 253a8b2b14824..8fb3c57dd7680 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java @@ -64,7 +64,7 @@ import java.util.Set; import java.util.function.Function; -import static org.opensearch.action.search.TransportSearchAction.SEARCH_REQUEST_STATS_ENABLED_KEY; +import static org.opensearch.action.search.SearchRequestStats.SEARCH_REQUEST_STATS_ENABLED_KEY; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; diff --git a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java index f18bbb8a1cc13..5b41c2a13b596 100644 --- a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java @@ -214,7 +214,7 @@ public final void start() { 0, 0, buildTookInMillis(), - timeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), ShardSearchFailure.EMPTY_ARRAY, clusters, null @@ -670,7 +670,7 @@ protected final SearchResponse 
buildSearchResponse( successfulOps.get(), skippedOps.get(), buildTookInMillis(), - timeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), failures, clusters, searchContextId diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java index 674363600b251..eceac7204b196 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java @@ -8,13 +8,11 @@ package org.opensearch.action.search; -import org.apache.logging.log4j.LogManager; import org.apache.lucene.search.TotalHits; import org.opensearch.common.annotation.InternalApi; import java.util.EnumMap; import java.util.HashMap; -import java.util.List; import java.util.Locale; import java.util.Map; @@ -31,18 +29,14 @@ class SearchRequestContext { private TotalHits totalHits; private final EnumMap<ShardStatsFieldNames, Integer> shardStats; - /** - * This constructor is for testing only - */ - SearchRequestContext() { - this(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger())); - } + private final SearchRequest searchRequest; - SearchRequestContext(SearchRequestOperationsListener searchRequestOperationsListener) { + SearchRequestContext(final SearchRequestOperationsListener searchRequestOperationsListener, final SearchRequest searchRequest) { this.searchRequestOperationsListener = searchRequestOperationsListener; this.absoluteStartNanos = System.nanoTime(); this.phaseTookMap = new HashMap<>(); this.shardStats = new EnumMap<>(ShardStatsFieldNames.class); + this.searchRequest = searchRequest; } SearchRequestOperationsListener getSearchRequestOperationsListener() { @@ -57,6 +51,14 @@ Map<String, Long> phaseTookMap() { return phaseTookMap; } + SearchResponse.PhaseTook getPhaseTook() { + if (searchRequest != null && searchRequest.isPhaseTook() != null && searchRequest.isPhaseTook()) { + return new SearchResponse.PhaseTook(phaseTookMap); + } else { + return null; + } + } + /** * Override absoluteStartNanos set in constructor. * For testing only diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactory.java b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactory.java new file mode 100644 index 0000000000000..db487bf945889 --- /dev/null +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactory.java @@ -0,0 +1,81 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.search; + +import org.apache.logging.log4j.Logger; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +/** + * SearchRequestOperationsCompositeListenerFactory contains listeners registered to search requests, + * and is responsible for creating the {@link SearchRequestOperationsListener.CompositeListener} + * with all the cluster-level and request-level listeners that are enabled.
+ * + * + * @opensearch.internal + */ +public final class SearchRequestOperationsCompositeListenerFactory { + private final List<SearchRequestOperationsListener> searchRequestListenersList; + + /** + * Create the SearchRequestOperationsCompositeListenerFactory and add multiple {@link SearchRequestOperationsListener} + * to the searchRequestListenersList. + * The enabled listeners are executed during each search request. + * + * @param listeners the SearchRequestOperationsListener objects to add. + * @throws IllegalArgumentException if any input listener is null. + */ + public SearchRequestOperationsCompositeListenerFactory(final SearchRequestOperationsListener... listeners) { + searchRequestListenersList = new ArrayList<>(); + for (SearchRequestOperationsListener listener : listeners) { + if (listener == null) { + throw new IllegalArgumentException("listener must not be null"); + } + searchRequestListenersList.add(listener); + } + } + + /** + * Get searchRequestListenersList. + * + * @return List of SearchRequestOperationsListener + */ + public List<SearchRequestOperationsListener> getListeners() { + return searchRequestListenersList; + } + + /** + * Create the {@link SearchRequestOperationsListener.CompositeListener} + * with all the cluster-level and request-level listeners that are enabled. + * + * @param searchRequest The SearchRequest object used to decide which request-level listeners to add, based on its state and flags. + * @param logger Logger to be attached to the {@link SearchRequestOperationsListener.CompositeListener} + * @param perRequestListeners the per-request listeners that can be optionally added to the returned CompositeListener list. + * @return SearchRequestOperationsListener.CompositeListener + */ + public SearchRequestOperationsListener.CompositeListener buildCompositeListener( + final SearchRequest searchRequest, + final Logger logger, + final SearchRequestOperationsListener...
perRequestListeners + ) { + final List<SearchRequestOperationsListener> searchListenersList = Stream.concat( + searchRequestListenersList.stream(), + Arrays.stream(perRequestListeners) + ) + .filter((searchRequestOperationsListener -> searchRequestOperationsListener.isEnabled(searchRequest))) + .collect(Collectors.toList()); + + return new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger); + } + +} diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java index 19ce0beb3c493..2a09cc084f79f 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java @@ -20,7 +20,16 @@ * @opensearch.internal */ @InternalApi -abstract class SearchRequestOperationsListener { +public abstract class SearchRequestOperationsListener { + private volatile boolean enabled; + + protected SearchRequestOperationsListener() { + this.enabled = true; + } + + protected SearchRequestOperationsListener(final boolean enabled) { + this.enabled = enabled; + } abstract void onPhaseStart(SearchPhaseContext context); @@ -32,6 +41,18 @@ void onRequestStart(SearchRequestContext searchRequestContext) {} void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} + boolean isEnabled(SearchRequest searchRequest) { + return isEnabled(); + } + + boolean isEnabled() { + return enabled; + } + + protected void setEnabled(final boolean enabled) { + this.enabled = enabled; + } + /** * Holder of Composite Listeners * @@ -101,5 +122,9 @@ public void onRequestEnd(SearchPhaseContext context, SearchRequestContext search } } } + + public List<SearchRequestOperationsListener> getListeners() { + return listeners; + } } } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java index a55cfd463a78f..7f25f9026f215 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java @@ -116,11 +116,11 @@ public SearchRequestSlowLog(ClusterService clusterService) { this.logger = logger; Loggers.setLevel(this.logger, SlowLogLevel.TRACE.name()); - this.warnThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_WARN_SETTING).nanos(); - this.infoThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_INFO_SETTING).nanos(); - this.debugThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_DEBUG_SETTING).nanos(); - this.traceThreshold = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_TRACE_SETTING).nanos(); - this.level = clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_LEVEL); + this.setWarnThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_WARN_SETTING)); + this.setInfoThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_INFO_SETTING)); + this.setDebugThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_DEBUG_SETTING)); + this.setTraceThreshold(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_TRACE_SETTING)); + 
this.setLevel(clusterService.getClusterSettings().get(CLUSTER_SEARCH_REQUEST_SLOWLOG_LEVEL)); clusterService.getClusterSettings() .addSettingsUpdateConsumer(CLUSTER_SEARCH_REQUEST_SLOWLOG_THRESHOLD_WARN_SETTING, this::setWarnThreshold); @@ -233,18 +233,22 @@ private static String escapeJson(String text) { void setWarnThreshold(TimeValue warnThreshold) { this.warnThreshold = warnThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setInfoThreshold(TimeValue infoThreshold) { this.infoThreshold = infoThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setDebugThreshold(TimeValue debugThreshold) { this.debugThreshold = debugThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setTraceThreshold(TimeValue traceThreshold) { this.traceThreshold = traceThreshold.nanos(); + setEnabledIfThresholdExceed(); } void setLevel(SlowLogLevel level) { @@ -270,4 +274,8 @@ protected long getTraceThreshold() { SlowLogLevel getLevel() { return level; } + + private void setEnabledIfThresholdExceed() { + super.setEnabled(this.warnThreshold >= 0 || this.debugThreshold >= 0 || this.infoThreshold >= 0 || this.traceThreshold >= 0); + } } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java index 262750849eaa9..88d599a0dcdaa 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java @@ -12,6 +12,8 @@ import org.opensearch.common.inject.Inject; import org.opensearch.common.metrics.CounterMetric; import org.opensearch.common.metrics.MeanMetric; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; import java.util.EnumMap; import java.util.Map; @@ -26,8 +28,18 @@ public final class SearchRequestStats extends SearchRequestOperationsListener { Map<SearchPhaseName, StatsHolder> phaseStatsMap = new EnumMap<>(SearchPhaseName.class); + public static final String SEARCH_REQUEST_STATS_ENABLED_KEY = "search.request_stats_enabled"; + public static final Setting<Boolean> SEARCH_REQUEST_STATS_ENABLED = Setting.boolSetting( + SEARCH_REQUEST_STATS_ENABLED_KEY, + false, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + @Inject - public SearchRequestStats() { + public SearchRequestStats(ClusterSettings clusterSettings) { + this.setEnabled(clusterSettings.get(SEARCH_REQUEST_STATS_ENABLED)); + clusterSettings.addSettingsUpdateConsumer(SEARCH_REQUEST_STATS_ENABLED, this::setEnabled); for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) { phaseStatsMap.put(searchPhaseName, new StatsHolder()); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java index 054bd578cc56c..538e7fd54e2c3 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/opensearch/action/search/SearchResponseMerger.java @@ -110,7 +110,7 @@ final class SearchResponseMerger { /** * Add a search response to the list of responses to be merged together into one. * Merges currently happen at once when all responses are available and - * {@link #getMergedResponse(SearchResponse.Clusters)} )} is called. + * {@link #getMergedResponse(SearchResponse.Clusters, SearchRequestContext)} is called.
* That may change in the future as it's possible to introduce incremental merges as responses come in if necessary. */ void add(SearchResponse searchResponse) { @@ -126,7 +126,7 @@ int numResponses() { * Returns the merged response. To be called once all responses have been added through {@link #add(SearchResponse)} * so that all responses are merged into a single one. */ - SearchResponse getMergedResponse(SearchResponse.Clusters clusters) { + SearchResponse getMergedResponse(SearchResponse.Clusters clusters, SearchRequestContext searchRequestContext) { // if the search is only across remote clusters, none of them are available, and all of them have skip_unavailable set to true, // we end up calling merge without anything to merge, we just return an empty search response if (searchResponses.size() == 0) { @@ -236,7 +236,7 @@ SearchResponse getMergedResponse(SearchResponse.Clusters clusters) { successfulShards, skippedShards, tookInMillis, - searchTimeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), shardFailures, clusters, null diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java index 05f4308df74fa..842c10b700d24 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java @@ -98,7 +98,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.EnumMap; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -154,20 +153,12 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest, Setting.Property.NodeScope ); - public static final String SEARCH_REQUEST_STATS_ENABLED_KEY = "search.request_stats_enabled"; - public static final Setting<Boolean> SEARCH_REQUEST_STATS_ENABLED = Setting.boolSetting( - SEARCH_REQUEST_STATS_ENABLED_KEY, - false, - Property.Dynamic, - Property.NodeScope - ); - public static final String SEARCH_PHASE_TOOK_ENABLED_KEY = "search.phase_took_enabled"; public static final Setting<Boolean> SEARCH_PHASE_TOOK_ENABLED = Setting.boolSetting( SEARCH_PHASE_TOOK_ENABLED_KEY, false, - Property.Dynamic, - Property.NodeScope + Setting.Property.Dynamic, + Setting.Property.NodeScope ); private final NodeClient client; @@ -181,14 +172,10 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest, private final NamedWriteableRegistry namedWriteableRegistry; private final CircuitBreaker circuitBreaker; private final SearchPipelineService searchPipelineService; - - private volatile boolean isRequestStatsEnabled; + private final SearchRequestOperationsCompositeListenerFactory searchRequestOperationsCompositeListenerFactory; private volatile boolean searchQueryMetricsEnabled; - private final SearchRequestStats searchRequestStats; - private final SearchRequestSlowLog searchRequestSlowLog; - private final MetricsRegistry metricsRegistry; private SearchQueryCategorizer searchQueryCategorizer; @@ -207,9 +194,8 @@ public TransportSearchAction( IndexNameExpressionResolver indexNameExpressionResolver, NamedWriteableRegistry namedWriteableRegistry, SearchPipelineService searchPipelineService, - SearchRequestStats searchRequestStats, - SearchRequestSlowLog searchRequestSlowLog, - MetricsRegistry metricsRegistry + MetricsRegistry metricsRegistry, + SearchRequestOperationsCompositeListenerFactory searchRequestOperationsCompositeListenerFactory ) 
{ super(SearchAction.NAME, transportService, actionFilters, (Writeable.Reader<SearchRequest>) SearchRequest::new); this.client = client; @@ -224,12 +210,9 @@ public TransportSearchAction( this.indexNameExpressionResolver = indexNameExpressionResolver; this.namedWriteableRegistry = namedWriteableRegistry; this.searchPipelineService = searchPipelineService; - this.isRequestStatsEnabled = clusterService.getClusterSettings().get(SEARCH_REQUEST_STATS_ENABLED); - clusterService.getClusterSettings().addSettingsUpdateConsumer(SEARCH_REQUEST_STATS_ENABLED, this::setIsRequestStatsEnabled); - this.searchRequestStats = searchRequestStats; - this.searchRequestSlowLog = searchRequestSlowLog; this.metricsRegistry = metricsRegistry; this.searchQueryMetricsEnabled = clusterService.getClusterSettings().get(SEARCH_QUERY_METRICS_ENABLED_SETTING); + this.searchRequestOperationsCompositeListenerFactory = searchRequestOperationsCompositeListenerFactory; clusterService.getClusterSettings() .addSettingsUpdateConsumer(SEARCH_QUERY_METRICS_ENABLED_SETTING, this::setSearchQueryMetricsEnabled); } @@ -241,10 +224,6 @@ private void setSearchQueryMetricsEnabled(boolean searchQueryMetricsEnabled) { } } - private void setIsRequestStatsEnabled(boolean isRequestStatsEnabled) { - this.isRequestStatsEnabled = isRequestStatsEnabled; - } - private Map<String, AliasFilter> buildPerIndexAliasFilter( SearchRequest request, ClusterState clusterState, @@ -289,8 +268,6 @@ private Map<String, Float> resolveIndexBoosts(SearchRequest searchRequest, Clust } /** - * Listener to track request-level tookTime and phase tookTimes from the coordinator. - * * Search operations need two clocks. One clock is to fulfill real clock needs (e.g., resolving * "now" to an index name). Another clock is needed for measuring how long a search operation * took. These two uses are at odds with each other. There are many issues with using a real @@ -300,12 +277,10 @@ private Map<String, Float> resolveIndexBoosts(SearchRequest searchRequest, Clust * * @opensearch.internal */ - static final class SearchTimeProvider extends SearchRequestOperationsListener { - + static final class SearchTimeProvider { private final long absoluteStartMillis; private final long relativeStartNanos; private final LongSupplier relativeCurrentNanosProvider; - private boolean phaseTook = false; /** * Instantiates a new search time provider. 
The absolute start time is the real clock time @@ -331,43 +306,6 @@ long getAbsoluteStartMillis() { long buildTookInMillis() { return TimeUnit.NANOSECONDS.toMillis(relativeCurrentNanosProvider.getAsLong() - relativeStartNanos); } - - public void setPhaseTook(boolean phaseTook) { - this.phaseTook = phaseTook; - } - - SearchResponse.PhaseTook getPhaseTook() { - if (phaseTook) { - Map<String, Long> phaseTookMap = new HashMap<>(); - // Convert Map<SearchPhaseName, Long> to Map<String, Long> for SearchResponse() - for (SearchPhaseName searchPhaseName : phaseStatsMap.keySet()) { - phaseTookMap.put(searchPhaseName.getName(), phaseStatsMap.get(searchPhaseName)); - } - return new SearchResponse.PhaseTook(phaseTookMap); - } else { - return null; - } - } - - Map<SearchPhaseName, Long> phaseStatsMap = new EnumMap<>(SearchPhaseName.class); - - @Override - void onPhaseStart(SearchPhaseContext context) {} - - @Override - void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { - phaseStatsMap.put( - context.getCurrentPhase().getSearchPhaseName(), - TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - context.getCurrentPhase().getStartTimeInNanos()) - ); - } - - @Override - void onPhaseFailure(SearchPhaseContext context) {} - - public Long getPhaseTookTime(SearchPhaseName searchPhaseName) { - return phaseStatsMap.get(searchPhaseName); - } } @Override @@ -490,11 +428,12 @@ private void executeRequest( relativeStartNanos, System::nanoTime ); - - final List<SearchRequestOperationsListener> searchListenersList = createSearchListenerList(originalSearchRequest, timeProvider); - SearchRequestContext searchRequestContext = new SearchRequestContext( - new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger) - ); + if (originalSearchRequest.isPhaseTook() == null) { + originalSearchRequest.setPhaseTook(clusterService.getClusterSettings().get(SEARCH_PHASE_TOOK_ENABLED)); + } + SearchRequestOperationsListener.CompositeListener requestOperationsListeners = searchRequestOperationsCompositeListenerFactory + .buildCompositeListener(originalSearchRequest, logger); + SearchRequestContext searchRequestContext = new SearchRequestContext(requestOperationsListeners, originalSearchRequest); searchRequestContext.getSearchRequestOperationsListener().onRequestStart(searchRequestContext); PipelinedRequest searchRequest; @@ -599,7 +538,8 @@ private ActionListener<SearchSourceBuilder> buildRewriteListener( searchContext, searchAsyncActionProvider, searchRequestContext - ) + ), + searchRequestContext ); } else { AtomicInteger skippedClusters = new AtomicInteger(0); @@ -687,7 +627,8 @@ static void ccsRemoteReduce( RemoteClusterService remoteClusterService, ThreadPool threadPool, ActionListener<SearchResponse> listener, - BiConsumer<SearchRequest, ActionListener<SearchResponse>> localSearchConsumer + BiConsumer<SearchRequest, ActionListener<SearchResponse>> localSearchConsumer, + SearchRequestContext searchRequestContext ) { if (localIndices == null && remoteIndices.size() == 1) { @@ -729,7 +670,7 @@ public void onResponse(SearchResponse searchResponse) { searchResponse.getSuccessfulShards(), searchResponse.getSkippedShards(), timeProvider.buildTookInMillis(), - timeProvider.getPhaseTook(), + searchRequestContext.getPhaseTook(), searchResponse.getShardFailures(), new SearchResponse.Clusters(1, 1, 0), searchResponse.pointInTimeId() @@ -775,7 +716,8 @@ public void onFailure(Exception e) { exceptions, searchResponseMerger, totalClusters, - listener + listener, + searchRequestContext ); 
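+ // ccsListener carries the SearchRequestContext so that getMergedResponse can attach phase took times to the merged cross-cluster response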
Client remoteClusterClient = remoteClusterService.getRemoteClusterClient(threadPool, clusterAlias); remoteClusterClient.search(ccsSearchRequest, ccsListener); @@ -789,7 +731,8 @@ public void onFailure(Exception e) { exceptions, searchResponseMerger, totalClusters, - listener + listener, + searchRequestContext ); SearchRequest ccsLocalSearchRequest = SearchRequest.subSearchRequest( searchRequest, @@ -884,7 +827,8 @@ private static ActionListener<SearchResponse> createCCSListener( AtomicReference<Exception> exceptions, SearchResponseMerger searchResponseMerger, int totalClusters, - ActionListener<SearchResponse> originalListener + ActionListener<SearchResponse> originalListener, + SearchRequestContext searchRequestContext ) { return new CCSActionListener<SearchResponse, SearchResponse>( clusterAlias, @@ -906,7 +850,7 @@ SearchResponse createFinalResponse() { searchResponseMerger.numResponses(), skippedClusters.get() ); - return searchResponseMerger.getMergedResponse(clusters); + return searchResponseMerger.getMergedResponse(clusters, searchRequestContext); } }; } @@ -1244,35 +1188,6 @@ AbstractSearchAsyncAction<? extends SearchPhaseResult> asyncSearchAction( ); } - private List<SearchRequestOperationsListener> createSearchListenerList(SearchRequest searchRequest, SearchTimeProvider timeProvider) { - final List<SearchRequestOperationsListener> searchListenersList = new ArrayList<>(); - - if (isRequestStatsEnabled) { - searchListenersList.add(searchRequestStats); - } - - // phase_took is enabled with request param and/or cluster setting - Boolean phaseTookRequestParam = searchRequest.isPhaseTook(); - if (phaseTookRequestParam == null) { // check cluster setting only when request param is undefined - if (clusterService.getClusterSettings().get(TransportSearchAction.SEARCH_PHASE_TOOK_ENABLED)) { - timeProvider.setPhaseTook(true); - searchListenersList.add(timeProvider); - } - } else if (phaseTookRequestParam == true) { - timeProvider.setPhaseTook(true); - searchListenersList.add(timeProvider); - } - - if (searchRequestSlowLog.getWarnThreshold() >= 0 - || searchRequestSlowLog.getInfoThreshold() >= 0 - || searchRequestSlowLog.getDebugThreshold() >= 0 - || searchRequestSlowLog.getTraceThreshold() >= 0) { - searchListenersList.add(searchRequestSlowLog); - } - - return searchListenersList; - } - private AbstractSearchAsyncAction<? 
extends SearchPhaseResult> searchAsyncAction( SearchTask task, SearchRequest searchRequest, diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index fa4b0f475edc5..277286ae1ff1b 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -36,6 +36,7 @@ import org.opensearch.action.admin.indices.close.TransportCloseIndexAction; import org.opensearch.action.search.CreatePitController; import org.opensearch.action.search.SearchRequestSlowLog; +import org.opensearch.action.search.SearchRequestStats; import org.opensearch.action.search.TransportSearchAction; import org.opensearch.action.support.AutoCreateIndex; import org.opensearch.action.support.DestructiveOperations; @@ -380,9 +381,9 @@ public void apply(Settings value, Settings current, Settings previous) { SearchService.DEFAULT_ALLOW_PARTIAL_SEARCH_RESULTS, TransportSearchAction.SHARD_COUNT_LIMIT_SETTING, TransportSearchAction.SEARCH_CANCEL_AFTER_TIME_INTERVAL_SETTING, - TransportSearchAction.SEARCH_REQUEST_STATS_ENABLED, - TransportSearchAction.SEARCH_PHASE_TOOK_ENABLED, TransportSearchAction.SEARCH_QUERY_METRICS_ENABLED_SETTING, + TransportSearchAction.SEARCH_PHASE_TOOK_ENABLED, + SearchRequestStats.SEARCH_REQUEST_STATS_ENABLED, RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE, SniffConnectionStrategy.REMOTE_CONNECTIONS_PER_CLUSTER, RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING, diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 4cbf8dc191a9d..8510122c39fcb 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -46,6 +46,8 @@ import org.opensearch.action.admin.cluster.snapshots.status.TransportNodesSnapshotsStatus; import org.opensearch.action.search.SearchExecutionStatsCollector; import org.opensearch.action.search.SearchPhaseController; +import org.opensearch.action.search.SearchRequestOperationsCompositeListenerFactory; +import org.opensearch.action.search.SearchRequestOperationsListener; import org.opensearch.action.search.SearchRequestSlowLog; import org.opensearch.action.search.SearchRequestStats; import org.opensearch.action.search.SearchTransportService; @@ -783,7 +785,7 @@ protected Node( threadPool ); - final SearchRequestStats searchRequestStats = new SearchRequestStats(); + final SearchRequestStats searchRequestStats = new SearchRequestStats(clusterService.getClusterSettings()); final SearchRequestSlowLog searchRequestSlowLog = new SearchRequestSlowLog(clusterService); remoteStoreStatsTrackerFactory = new RemoteStoreStatsTrackerFactory(clusterService, settings); @@ -879,6 +881,17 @@ protected Node( ) .collect(Collectors.toList()); + // register all standard SearchRequestOperationsListeners to the SearchRequestOperationsCompositeListenerFactory + final SearchRequestOperationsCompositeListenerFactory searchRequestOperationsCompositeListenerFactory = + new SearchRequestOperationsCompositeListenerFactory( + Stream.concat( + Stream.of(searchRequestStats, searchRequestSlowLog), + pluginComponents.stream() + .filter(p -> p instanceof SearchRequestOperationsListener) + .map(p -> (SearchRequestOperationsListener) p) + ).toArray(SearchRequestOperationsListener[]::new) + ); + ActionModule actionModule = new ActionModule( settings,
clusterModule.getIndexNameExpressionResolver(), @@ -1275,6 +1288,7 @@ protected Node( b.bind(RemoteClusterStateService.class).toProvider(() -> remoteClusterStateService); b.bind(PersistedStateRegistry.class).toInstance(persistedStateRegistry); b.bind(SegmentReplicationStatsTracker.class).toInstance(segmentReplicationStatsTracker); + b.bind(SearchRequestOperationsCompositeListenerFactory.class).toInstance(searchRequestOperationsCompositeListenerFactory); }); injector = modules.createInjector(); diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java index b8ab5c935fa34..a5ca08f141560 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -44,6 +44,8 @@ import org.opensearch.cluster.service.ClusterStateStats; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.metrics.OperationStats; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.indices.breaker.AllCircuitBreakerStats; @@ -961,7 +963,12 @@ public void apply(String action, AdmissionControlActionType admissionControlActi private static NodeIndicesStats getNodeIndicesStats(boolean remoteStoreStats) { NodeIndicesStats indicesStats = null; if (remoteStoreStats) { - indicesStats = new NodeIndicesStats(new CommonStats(CommonStatsFlags.ALL), new HashMap<>(), new SearchRequestStats()); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + indicesStats = new NodeIndicesStats( + new CommonStats(CommonStatsFlags.ALL), + new HashMap<>(), + new SearchRequestStats(clusterSettings) + ); RemoteSegmentStats remoteSegmentStats = indicesStats.getSegments().getRemoteSegmentStats(); remoteSegmentStats.addUploadBytesStarted(10L); remoteSegmentStats.addUploadBytesSucceeded(10L); diff --git a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java index e17fbab32a12e..76129341fc9a2 100644 --- a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java @@ -32,12 +32,15 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.opensearch.action.OriginalIndices; import org.opensearch.action.support.IndicesOptions; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.routing.GroupShardsIterator; import org.opensearch.common.UUIDs; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.AtomicArray; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.set.Sets; @@ -175,7 +178,7 @@ private AbstractSearchAsyncAction<SearchPhaseResult> createAction( results, request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new 
SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { @Override protected SearchPhase getNextPhase(final SearchPhaseResults<SearchPhaseResult> results, SearchPhaseContext context) { @@ -328,7 +331,8 @@ public void testSendSearchResponseDisallowPartialFailures() { } public void testOnPhaseFailureAndVerifyListeners() { - SearchRequestStats testListener = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testListener = new SearchRequestStats(clusterSettings); final List<SearchRequestOperationsListener> requestOperationListeners = new ArrayList<>(List.of(testListener)); SearchQueryThenFetchAsyncAction action = createSearchQueryThenFetchAsyncAction(requestOperationListeners); @@ -591,7 +595,8 @@ public void onFailure(Exception e) { } public void testOnPhaseListenersWithQueryAndThenFetchType() throws InterruptedException { - SearchRequestStats testListener = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testListener = new SearchRequestStats(clusterSettings); final List<SearchRequestOperationsListener> requestOperationListeners = new ArrayList<>(List.of(testListener)); long delay = (randomIntBetween(1, 5)); @@ -640,7 +645,8 @@ public void testOnPhaseListenersWithQueryAndThenFetchType() throws InterruptedEx } public void testOnPhaseListenersWithDfsType() throws InterruptedException { - SearchRequestStats testListener = new SearchRequestStats(); + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + SearchRequestStats testListener = new SearchRequestStats(clusterSettings); final List<SearchRequestOperationsListener> requestOperationListeners = new ArrayList<>(List.of(testListener)); SearchDfsQueryThenFetchAsyncAction searchDfsQueryThenFetchAsyncAction = createSearchDfsQueryThenFetchAsyncAction( @@ -710,7 +716,10 @@ private SearchDfsQueryThenFetchAsyncAction createSearchDfsQueryThenFetchAsyncAct null, task, SearchResponse.Clusters.EMPTY, - new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger)) + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger), + searchRequest + ) ); } @@ -760,7 +769,10 @@ private SearchQueryThenFetchAsyncAction createSearchQueryThenFetchAsyncAction( null, task, SearchResponse.Clusters.EMPTY, - new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger)) + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(searchRequestOperationsListeners, logger), + searchRequest + ) ) { @Override ShardSearchFailure[] buildShardFailures() { diff --git a/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 4ed4797efe604..56dcf66d5607d 100644 --- a/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/opensearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -31,6 +31,7 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.util.BytesRef; import org.opensearch.Version; import 
org.opensearch.action.OriginalIndices; @@ -137,7 +138,10 @@ public void run() throws IOException { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -229,7 +233,10 @@ public void run() throws IOException { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -320,7 +327,10 @@ public void sendCanMatch( new ArraySearchPhaseResults<>(iter.size()), randomIntBetween(1, 32), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ) { @Override @@ -348,7 +358,10 @@ protected void executePhaseOnShard( } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -433,7 +446,10 @@ public void run() { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); @@ -533,7 +549,10 @@ public void run() { } }, SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext( + new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), + searchRequest + ) ); canMatchPhase.start(); diff --git a/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java index 7b4fa1d8387df..af7adc4e58fb8 100644 --- a/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/SearchAsyncActionTests.java @@ -31,6 +31,7 @@ package org.opensearch.action.search; +import org.apache.logging.log4j.LogManager; import org.opensearch.Version; import org.opensearch.action.OriginalIndices; import org.opensearch.cluster.ClusterState; @@ -61,6 +62,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -136,7 +138,7 @@ public void testSkipSearchShards() throws InterruptedException { new ArraySearchPhaseResults<>(shardsIter.size()), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { @Override @@ -255,7 +257,7 @@ public void testLimitConcurrentShardRequests() throws InterruptedException { new ArraySearchPhaseResults<>(shardsIter.size()), request.getMaxConcurrentShardRequests(), SearchResponse.Clusters.EMPTY, - new SearchRequestContext() + new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request) ) { @Override @@ -373,7 +375,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI new ArraySearchPhaseResults<>(shardsIter.size()), 
             request.getMaxConcurrentShardRequests(),
             SearchResponse.Clusters.EMPTY,
-            new SearchRequestContext()
+            new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request)
         ) {
             TestSearchResponse response = new TestSearchResponse();
@@ -496,7 +498,7 @@ public void sendFreeContext(Transport.Connection connection, ShardSearchContextI
             new ArraySearchPhaseResults<>(shardsIter.size()),
             request.getMaxConcurrentShardRequests(),
             SearchResponse.Clusters.EMPTY,
-            new SearchRequestContext()
+            new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request)
         ) {
             TestSearchResponse response = new TestSearchResponse();
@@ -610,7 +612,7 @@ public void testAllowPartialResults() throws InterruptedException {
             new ArraySearchPhaseResults<>(shardsIter.size()),
             request.getMaxConcurrentShardRequests(),
             SearchResponse.Clusters.EMPTY,
-            new SearchRequestContext()
+            new SearchRequestContext(new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()), request)
         ) {
             @Override
             protected void executePhaseOnShard(
diff --git a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java
index a8c0c43ac5080..faf6f86c69c27 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchQueryThenFetchAsyncActionTests.java
@@ -32,6 +32,7 @@
 
 package org.opensearch.action.search;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TopFieldDocs;
@@ -63,6 +64,7 @@
 import org.opensearch.transport.Transport;
 
 import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
@@ -215,7 +217,10 @@ public void sendExecuteQuery(
             null,
             task,
             SearchResponse.Clusters.EMPTY,
-            new SearchRequestContext()
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                searchRequest
+            )
         ) {
             @Override
             protected SearchPhase getNextPhase(SearchPhaseResults<SearchPhaseResult> results, SearchPhaseContext context) {
diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java
new file mode 100644
index 0000000000000..78c5ba4412c68
--- /dev/null
+++ b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java
@@ -0,0 +1,131 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.action.search;
+
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.search.builder.SearchSourceBuilder;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.List;
+
+public class SearchRequestOperationsCompositeListenerFactoryTests extends OpenSearchTestCase {
+    public void testAddAndGetListeners() {
+        SearchRequestOperationsListener testListener = createTestSearchRequestOperationsListener();
+        SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory(
+            testListener
+        );
+        assertEquals(1, requestListeners.getListeners().size());
+        assertEquals(testListener, requestListeners.getListeners().get(0));
+    }
+
+    public void testStandardListenersEnabled() {
+        SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener();
+        SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener();
+        testListener1.setEnabled(false);
+        SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory(
+            testListener1,
+            testListener2
+        );
+        SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
+        SearchRequest searchRequest = new SearchRequest().source(source);
+        SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener(
+            searchRequest,
+            logger
+        );
+        List<SearchRequestOperationsListener> listeners = compositeListener.getListeners();
+        assertEquals(1, listeners.size());
+        assertEquals(testListener2, listeners.get(0));
+        assertEquals(2, requestListeners.getListeners().size());
+        assertEquals(testListener1, requestListeners.getListeners().get(0));
+        assertEquals(testListener2, requestListeners.getListeners().get(1));
+    }
+
+    public void testStandardListenersAndPerRequestListener() {
+        SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener();
+        SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory(
+            testListener1
+        );
+        SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener();
+        testListener1.setEnabled(true);
+        testListener2.setEnabled(true);
+        SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
+        SearchRequest searchRequest = new SearchRequest().source(source);
+        searchRequest.setPhaseTook(true);
+        SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener(
+            searchRequest,
+            logger,
+            testListener2
+        );
+        List<SearchRequestOperationsListener> listeners = compositeListener.getListeners();
+        assertEquals(2, listeners.size());
+        assertEquals(testListener1, listeners.get(0));
+        assertEquals(testListener2, listeners.get(1));
+        assertEquals(1, requestListeners.getListeners().size());
+        assertEquals(testListener1, requestListeners.getListeners().get(0));
+    }
+
+    public void testStandardListenersDisabledAndPerRequestListener() {
+        SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener();
+        testListener1.setEnabled(false);
+        SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory(
+            testListener1
+        );
+        SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener();
+        SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
+        SearchRequest searchRequest = new SearchRequest().source(source);
+        SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener(
+            searchRequest,
+            logger,
+            testListener2
+        );
+        List<SearchRequestOperationsListener> listeners = compositeListener.getListeners();
+        assertEquals(1, listeners.size());
+        assertEquals(testListener2, listeners.get(0));
+        assertEquals(1, requestListeners.getListeners().size());
+        assertEquals(testListener1, requestListeners.getListeners().get(0));
+        assertFalse(requestListeners.getListeners().get(0).isEnabled());
+    }
+
+    public void testStandardListenerAndPerRequestListenerDisabled() {
+        SearchRequestOperationsListener testListener1 = createTestSearchRequestOperationsListener();
+        SearchRequestOperationsCompositeListenerFactory requestListeners = new SearchRequestOperationsCompositeListenerFactory(
+            testListener1
+        );
+        testListener1.setEnabled(true);
+        SearchRequestOperationsListener testListener2 = createTestSearchRequestOperationsListener();
+        testListener2.setEnabled(false);
+
+        SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
+        SearchRequest searchRequest = new SearchRequest().source(source);
+        searchRequest.setPhaseTook(false);
+        SearchRequestOperationsListener.CompositeListener compositeListener = requestListeners.buildCompositeListener(
+            searchRequest,
+            logger,
+            testListener2
+        );
+        List<SearchRequestOperationsListener> listeners = compositeListener.getListeners();
+        assertEquals(1, listeners.size());
+        assertEquals(testListener1, listeners.get(0));
+        assertEquals(1, requestListeners.getListeners().size());
+        assertEquals(testListener1, requestListeners.getListeners().get(0));
+    }
+
+    public SearchRequestOperationsListener createTestSearchRequestOperationsListener() {
+        return new SearchRequestOperationsListener() {
+            @Override
+            void onPhaseStart(SearchPhaseContext context) {}
+
+            @Override
+            void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {}
+
+            @Override
+            void onPhaseFailure(SearchPhaseContext context) {}
+        };
+    }
+}
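Taken together, these tests pin down the composition rules: disabled standard listeners are filtered out of the per-request composite, and a per-request listener is appended only when it is enabled. A hedged usage sketch, assuming only the constructor and buildCompositeListener overloads exercised above; the listener variable names are illustrative:

// Standard listeners are registered once, at factory construction time.
SearchRequestOperationsCompositeListenerFactory factory = new SearchRequestOperationsCompositeListenerFactory(
    statsListener,   // e.g. a SearchRequestStats
    slowLogListener  // e.g. a SearchRequestSlowLog
);

// Per request, only enabled listeners make it into the composite.
SearchRequestOperationsListener.CompositeListener composite = factory.buildCompositeListener(
    searchRequest,
    logger,
    perRequestListener // optional trailing listener applied to this request only
);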
diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java
index 58a4c4a4e555d..0f737e00478cb 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsListenerSupport.java
@@ -8,6 +8,10 @@
 
 package org.opensearch.action.search;
 
+import org.apache.logging.log4j.LogManager;
+
+import java.util.List;
+
 /**
  * Helper interface to access package protected {@link SearchRequestOperationsListener} from test cases.
  */
@@ -17,6 +21,12 @@ default void onPhaseStart(SearchRequestOperationsListener listener, SearchPhaseC
     }
 
     default void onPhaseEnd(SearchRequestOperationsListener listener, SearchPhaseContext context) {
-        listener.onPhaseEnd(context, new SearchRequestContext());
+        listener.onPhaseEnd(
+            context,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
     }
 }
diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java
index e23f08c9415eb..f009988ffae17 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchRequestSlowLogTests.java
@@ -104,6 +104,7 @@ public void testMultipleSlowLoggersUseSingleLog4jLogger() {
             new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS),
             null
         );
+        SearchRequestOperationsCompositeListenerFactory searchRequestListeners = new SearchRequestOperationsCompositeListenerFactory();
         SearchRequestSlowLog searchRequestSlowLog2 = new SearchRequestSlowLog(clusterService2);
 
         int numberOfLoggersAfter = context.getLoggers().size();
@@ -175,7 +176,8 @@ public void testConcurrentOnRequestEnd() throws InterruptedException {
         ArrayList<SearchRequestContext> searchRequestContexts = new ArrayList<>();
         for (int i = 0; i < numRequests; i++) {
             SearchRequestContext searchRequestContext = new SearchRequestContext(
-                new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger)
+                new SearchRequestOperationsListener.CompositeListener(searchListenersList, logger),
+                searchRequest
             );
             searchRequestContext.setAbsoluteStartNanos((i < numRequestsLogged) ? 0 : System.nanoTime());
             searchRequestContexts.add(searchRequestContext);
@@ -204,7 +206,10 @@ public void testSearchRequestSlowLogHasJsonFields_EmptySearchRequestContext() th
         SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
         SearchRequest searchRequest = new SearchRequest().source(source);
         SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest);
-        SearchRequestContext searchRequestContext = new SearchRequestContext();
+        SearchRequestContext searchRequestContext = new SearchRequestContext(
+            new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+            searchRequest
+        );
         SearchRequestSlowLog.SearchRequestSlowLogMessage p = new SearchRequestSlowLog.SearchRequestSlowLogMessage(
             searchPhaseContext,
             10,
@@ -225,7 +230,10 @@ public void testSearchRequestSlowLogHasJsonFields_NotEmptySearchRequestContext()
         SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
         SearchRequest searchRequest = new SearchRequest().source(source);
         SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest);
-        SearchRequestContext searchRequestContext = new SearchRequestContext();
+        SearchRequestContext searchRequestContext = new SearchRequestContext(
+            new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+            searchRequest
+        );
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.FETCH.getName(), 10L);
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.QUERY.getName(), 50L);
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.EXPAND.getName(), 5L);
@@ -251,7 +259,10 @@ public void testSearchRequestSlowLogHasJsonFields_PartialContext() throws IOExce
         SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
         SearchRequest searchRequest = new SearchRequest().source(source);
         SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest);
-        SearchRequestContext searchRequestContext = new SearchRequestContext();
+        SearchRequestContext searchRequestContext = new SearchRequestContext(
+            new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+            searchRequest
+        );
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.FETCH.getName(), 10L);
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.QUERY.getName(), 50L);
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.EXPAND.getName(), 5L);
@@ -277,7 +288,10 @@ public void testSearchRequestSlowLogSearchContextPrinterToLog() throws IOExcepti
         SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery());
         SearchRequest searchRequest = new SearchRequest().source(source);
         SearchPhaseContext searchPhaseContext = new MockSearchPhaseContext(1, searchRequest);
-        SearchRequestContext searchRequestContext = new SearchRequestContext();
+        SearchRequestContext searchRequestContext = new SearchRequestContext(
+            new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+            searchRequest
+        );
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.FETCH.getName(), 10L);
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.QUERY.getName(), 50L);
         searchRequestContext.updatePhaseTookMap(SearchPhaseName.EXPAND.getName(), 5L);
diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java
index 93cf77933fdd5..377ccebbfd418 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchRequestStatsTests.java
@@ -8,9 +8,13 @@
 
 package org.opensearch.action.search;
 
+import org.apache.logging.log4j.LogManager;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
 import org.opensearch.test.OpenSearchTestCase;
 
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.Phaser;
@@ -22,7 +26,8 @@
 public class SearchRequestStatsTests extends OpenSearchTestCase {
     public void testSearchRequestPhaseFailure() {
-        SearchRequestStats testRequestStats = new SearchRequestStats();
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings);
         SearchPhaseContext ctx = mock(SearchPhaseContext.class);
         SearchPhase mockSearchPhase = mock(SearchPhase.class);
         when(ctx.getCurrentPhase()).thenReturn(mockSearchPhase);
@@ -37,7 +42,8 @@ public void testSearchRequestPhaseFailure() {
     }
 
     public void testSearchRequestStats() {
-        SearchRequestStats testRequestStats = new SearchRequestStats();
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings);
         SearchPhaseContext ctx = mock(SearchPhaseContext.class);
         SearchPhase mockSearchPhase = mock(SearchPhase.class);
 
@@ -50,7 +56,13 @@ public void testSearchRequestStats() {
             long startTime = System.nanoTime() - TimeUnit.MILLISECONDS.toNanos(tookTimeInMillis);
             when(mockSearchPhase.getStartTimeInNanos()).thenReturn(startTime);
             assertEquals(1, testRequestStats.getPhaseCurrent(searchPhaseName));
-            testRequestStats.onPhaseEnd(ctx, new SearchRequestContext());
+            testRequestStats.onPhaseEnd(
+                ctx,
+                new SearchRequestContext(
+                    new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                    new SearchRequest()
+                )
+            );
             assertEquals(0, testRequestStats.getPhaseCurrent(searchPhaseName));
             assertEquals(1, testRequestStats.getPhaseTotal(searchPhaseName));
             assertThat(testRequestStats.getPhaseMetric(searchPhaseName), greaterThanOrEqualTo(tookTimeInMillis));
@@ -58,7 +70,8 @@
     }
 
     public void testSearchRequestStatsOnPhaseStartConcurrently() throws InterruptedException {
-        SearchRequestStats testRequestStats = new SearchRequestStats();
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings);
         int numTasks = randomIntBetween(5, 50);
         Thread[] threads = new Thread[numTasks * SearchPhaseName.values().length];
         Phaser phaser = new Phaser(numTasks * SearchPhaseName.values().length + 1);
@@ -85,7 +98,8 @@ public void testSearchRequestStatsOnPhaseStartConcurrently() throws InterruptedE
     }
 
     public void testSearchRequestStatsOnPhaseEndConcurrently() throws InterruptedException {
-        SearchRequestStats testRequestStats = new SearchRequestStats();
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings);
         int numTasks = randomIntBetween(5, 50);
         Thread[] threads = new Thread[numTasks * SearchPhaseName.values().length];
         Phaser phaser = new Phaser(numTasks * SearchPhaseName.values().length + 1);
@@ -102,7 +116,13 @@ public void testSearchRequestStatsOnPhaseEndConcurrently() throws InterruptedExc
             for (int i = 0; i < numTasks; i++) {
                 threads[i] = new Thread(() -> {
                     phaser.arriveAndAwaitAdvance();
-                    testRequestStats.onPhaseEnd(ctx, new SearchRequestContext());
+                    testRequestStats.onPhaseEnd(
+                        ctx,
+                        new SearchRequestContext(
+                            new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                            new SearchRequest()
+                        )
+                    );
                     countDownLatch.countDown();
                 });
                 threads[i].start();
@@ -121,7 +141,8 @@ public void testSearchRequestStatsOnPhaseEndConcurrently() throws InterruptedExc
     }
 
     public void testSearchRequestStatsOnPhaseFailureConcurrently() throws InterruptedException {
-        SearchRequestStats testRequestStats = new SearchRequestStats();
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings);
         int numTasks = randomIntBetween(5, 50);
         Thread[] threads = new Thread[numTasks * SearchPhaseName.values().length];
         Phaser phaser = new Phaser(numTasks * SearchPhaseName.values().length + 1);
diff --git a/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java
index 1004965c0d50e..ce4d5ca4f7091 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchResponseMergerTests.java
@@ -32,6 +32,7 @@
 
 package org.opensearch.action.search;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TotalHits;
 import org.opensearch.OpenSearchException;
@@ -132,7 +133,13 @@ public void testMergeTookInMillis() throws InterruptedException {
             addResponse(merger, searchResponse);
         }
         awaitResponsesAdded();
-        SearchResponse searchResponse = merger.getMergedResponse(SearchResponse.Clusters.EMPTY);
+        SearchResponse searchResponse = merger.getMergedResponse(
+            SearchResponse.Clusters.EMPTY,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis());
     }
 
@@ -184,7 +191,13 @@ public void testMergeShardFailures() throws InterruptedException {
         awaitResponsesAdded();
         assertEquals(numResponses, merger.numResponses());
         SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
-        SearchResponse mergedResponse = merger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = merger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertSame(clusters, mergedResponse.getClusters());
         assertEquals(numResponses, mergedResponse.getTotalShards());
         assertEquals(numResponses, mergedResponse.getSuccessfulShards());
@@ -235,7 +248,13 @@ public void testMergeShardFailuresNullShardTarget() throws InterruptedException
         awaitResponsesAdded();
         assertEquals(numResponses, merger.numResponses());
         SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
-        SearchResponse mergedResponse = merger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = merger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertSame(clusters, mergedResponse.getClusters());
         assertEquals(numResponses, mergedResponse.getTotalShards());
         assertEquals(numResponses, mergedResponse.getSuccessfulShards());
@@ -281,7 +300,13 @@ public void testMergeShardFailuresNullShardId() throws InterruptedException {
         }
         awaitResponsesAdded();
         assertEquals(numResponses, merger.numResponses());
-        ShardSearchFailure[] shardFailures = merger.getMergedResponse(SearchResponse.Clusters.EMPTY).getShardFailures();
+        ShardSearchFailure[] shardFailures = merger.getMergedResponse(
+            SearchResponse.Clusters.EMPTY,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        ).getShardFailures();
         assertThat(Arrays.asList(shardFailures), containsInAnyOrder(expectedFailures.toArray(ShardSearchFailure.EMPTY_ARRAY)));
     }
 
@@ -315,7 +340,13 @@ public void testMergeProfileResults() throws InterruptedException {
         awaitResponsesAdded();
         assertEquals(numResponses, merger.numResponses());
         SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
-        SearchResponse mergedResponse = merger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = merger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertSame(clusters, mergedResponse.getClusters());
         assertEquals(numResponses, mergedResponse.getTotalShards());
         assertEquals(numResponses, mergedResponse.getSuccessfulShards());
@@ -377,7 +408,13 @@ public void testMergeCompletionSuggestions() throws InterruptedException {
         awaitResponsesAdded();
         assertEquals(numResponses, searchResponseMerger.numResponses());
         SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
-        SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertSame(clusters, mergedResponse.getClusters());
         assertEquals(numResponses, mergedResponse.getTotalShards());
         assertEquals(numResponses, mergedResponse.getSuccessfulShards());
@@ -449,7 +486,13 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedException
         awaitResponsesAdded();
         assertEquals(numResponses, searchResponseMerger.numResponses());
         SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
-        SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertSame(clusters, mergedResponse.getClusters());
         assertEquals(numResponses, mergedResponse.getTotalShards());
         assertEquals(numResponses, mergedResponse.getSuccessfulShards());
@@ -523,7 +566,13 @@ public void testMergeAggs() throws InterruptedException {
         awaitResponsesAdded();
         assertEquals(numResponses, searchResponseMerger.numResponses());
         SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
-        SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertSame(clusters, mergedResponse.getClusters());
         assertEquals(numResponses, mergedResponse.getTotalShards());
         assertEquals(numResponses, mergedResponse.getSuccessfulShards());
@@ -680,7 +729,13 @@ public void testMergeSearchHits() throws InterruptedException {
         awaitResponsesAdded();
         assertEquals(numResponses, searchResponseMerger.numResponses());
         final SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
-        SearchResponse searchResponse = searchResponseMerger.getMergedResponse(clusters);
+        SearchResponse searchResponse = searchResponseMerger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
 
         assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), searchResponse.getTook().millis());
         assertEquals(expectedTotal, searchResponse.getTotalShards());
@@ -740,7 +795,13 @@ public void testMergeNoResponsesAdded() {
         SearchResponseMerger merger = new SearchResponseMerger(0, 10, Integer.MAX_VALUE, timeProvider, emptyReduceContextBuilder());
         SearchResponse.Clusters clusters = SearchResponseTests.randomClusters();
         assertEquals(0, merger.numResponses());
-        SearchResponse response = merger.getMergedResponse(clusters);
+        SearchResponse response = merger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertSame(clusters, response.getClusters());
         assertEquals(TimeUnit.NANOSECONDS.toMillis(currentRelativeTime), response.getTook().millis());
         assertEquals(0, response.getTotalShards());
@@ -813,7 +874,13 @@ public void testMergeEmptySearchHitsWithNonEmpty() {
             merger.add(searchResponse);
         }
         assertEquals(2, merger.numResponses());
-        SearchResponse mergedResponse = merger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = merger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertEquals(10, mergedResponse.getHits().getTotalHits().value);
         assertEquals(10, mergedResponse.getHits().getHits().length);
         assertEquals(2, mergedResponse.getTotalShards());
@@ -855,7 +922,13 @@ public void testMergeOnlyEmptyHits() {
             );
             merger.add(searchResponse);
         }
-        SearchResponse mergedResponse = merger.getMergedResponse(clusters);
+        SearchResponse mergedResponse = merger.getMergedResponse(
+            clusters,
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                new SearchRequest()
+            )
+        );
         assertEquals(expectedTotalHits, mergedResponse.getHits().getTotalHits());
     }
 
diff --git a/server/src/test/java/org/opensearch/action/search/SearchTimeProviderTests.java b/server/src/test/java/org/opensearch/action/search/SearchTimeProviderTests.java
deleted file mode 100644
index 4d8a44417a3ee..0000000000000
--- a/server/src/test/java/org/opensearch/action/search/SearchTimeProviderTests.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-package org.opensearch.action.search;
-
-import org.opensearch.test.OpenSearchTestCase;
-
-import java.util.concurrent.TimeUnit;
-
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-public class SearchTimeProviderTests extends OpenSearchTestCase {
-
-    public void testSearchTimeProviderPhaseFailure() {
-        TransportSearchAction.SearchTimeProvider testTimeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0);
-        SearchPhaseContext ctx = mock(SearchPhaseContext.class);
-        SearchPhase mockSearchPhase = mock(SearchPhase.class);
-        when(ctx.getCurrentPhase()).thenReturn(mockSearchPhase);
-
-        for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) {
-            when(mockSearchPhase.getSearchPhaseName()).thenReturn(searchPhaseName);
-            testTimeProvider.onPhaseStart(ctx);
-            assertNull(testTimeProvider.getPhaseTookTime(searchPhaseName));
-            testTimeProvider.onPhaseFailure(ctx);
-            assertNull(testTimeProvider.getPhaseTookTime(searchPhaseName));
-        }
-    }
-
-    public void testSearchTimeProviderPhaseEnd() {
-        TransportSearchAction.SearchTimeProvider testTimeProvider = new TransportSearchAction.SearchTimeProvider(0, 0, () -> 0);
-
-        SearchPhaseContext ctx = mock(SearchPhaseContext.class);
-        SearchPhase mockSearchPhase = mock(SearchPhase.class);
-        when(ctx.getCurrentPhase()).thenReturn(mockSearchPhase);
-
-        for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) {
-            when(mockSearchPhase.getSearchPhaseName()).thenReturn(searchPhaseName);
-            long tookTimeInMillis = randomIntBetween(1, 100);
-            testTimeProvider.onPhaseStart(ctx);
-            long startTime = System.nanoTime() - TimeUnit.MILLISECONDS.toNanos(tookTimeInMillis);
-            when(mockSearchPhase.getStartTimeInNanos()).thenReturn(startTime);
-            assertNull(testTimeProvider.getPhaseTookTime(searchPhaseName));
-            testTimeProvider.onPhaseEnd(ctx, new SearchRequestContext());
-            assertThat(testTimeProvider.getPhaseTookTime(searchPhaseName), greaterThanOrEqualTo(tookTimeInMillis));
-        }
-    }
-}
diff --git a/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java
index c4bf8a5d87172..da19c839f3826 100644
--- a/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java
+++ b/server/src/test/java/org/opensearch/action/search/TransportSearchActionTests.java
@@ -32,6 +32,7 @@
 
 package org.opensearch.action.search;
 
+import org.apache.logging.log4j.LogManager;
 import org.apache.lucene.search.TotalHits;
 import org.opensearch.Version;
 import org.opensearch.action.LatchedActionListener;
@@ -483,7 +484,11 @@ public void testCCSRemoteReduceMergeFails() throws Exception {
             remoteClusterService,
             threadPool,
             listener,
-            (r, l) -> setOnce.set(Tuple.tuple(r, l))
+            (r, l) -> setOnce.set(Tuple.tuple(r, l)),
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                searchRequest
+            )
         );
         if (localIndices == null) {
             assertNull(setOnce.get());
@@ -541,7 +546,11 @@ public void testCCSRemoteReduce() throws Exception {
             remoteClusterService,
             threadPool,
             listener,
-            (r, l) -> setOnce.set(Tuple.tuple(r, l))
+            (r, l) -> setOnce.set(Tuple.tuple(r, l)),
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                searchRequest
+            )
         );
         if (localIndices == null) {
             assertNull(setOnce.get());
@@ -578,7 +587,11 @@ public void testCCSRemoteReduce() throws Exception {
             remoteClusterService,
             threadPool,
             listener,
-            (r, l) -> setOnce.set(Tuple.tuple(r, l))
+            (r, l) -> setOnce.set(Tuple.tuple(r, l)),
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                searchRequest
+            )
         );
         if (localIndices == null) {
             assertNull(setOnce.get());
@@ -636,7 +649,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti
             remoteClusterService,
             threadPool,
             listener,
-            (r, l) -> setOnce.set(Tuple.tuple(r, l))
+            (r, l) -> setOnce.set(Tuple.tuple(r, l)),
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                searchRequest
+            )
         );
         if (localIndices == null) {
             assertNull(setOnce.get());
@@ -676,7 +693,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti
             remoteClusterService,
             threadPool,
             listener,
-            (r, l) -> setOnce.set(Tuple.tuple(r, l))
+            (r, l) -> setOnce.set(Tuple.tuple(r, l)),
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                searchRequest
+            )
         );
         if (localIndices == null) {
             assertNull(setOnce.get());
@@ -727,7 +748,11 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti
             remoteClusterService,
             threadPool,
             listener,
-            (r, l) -> setOnce.set(Tuple.tuple(r, l))
+            (r, l) -> setOnce.set(Tuple.tuple(r, l)),
+            new SearchRequestContext(
+                new SearchRequestOperationsListener.CompositeListener(List.of(), LogManager.getLogger()),
+                searchRequest
+            )
         );
         if (localIndices == null) {
             assertNull(setOnce.get());
diff --git a/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java b/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java
index 52b272094cd86..5656b77445772 100644
--- a/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java
+++ b/server/src/test/java/org/opensearch/index/search/stats/SearchStatsTests.java
@@ -37,6 +37,8 @@
 import org.opensearch.action.search.SearchPhaseName;
 import org.opensearch.action.search.SearchRequestOperationsListenerSupport;
 import org.opensearch.action.search.SearchRequestStats;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
 import org.opensearch.index.search.stats.SearchStats.Stats;
 import org.opensearch.test.OpenSearchTestCase;
 
@@ -77,7 +79,8 @@ public void testShardLevelSearchGroupStats() throws Exception {
         long paramValue = randomIntBetween(2, 50);
 
         // Testing for request stats
-        SearchRequestStats testRequestStats = new SearchRequestStats();
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        SearchRequestStats testRequestStats = new SearchRequestStats(clusterSettings);
         SearchPhaseContext ctx = mock(SearchPhaseContext.class);
         for (SearchPhaseName searchPhaseName : SearchPhaseName.values()) {
             SearchPhase mockSearchPhase = mock(SearchPhase.class);
diff --git a/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java b/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java
index 6f36d22b7e17b..2424e38636466 100644
--- a/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java
+++ b/server/src/test/java/org/opensearch/indices/NodeIndicesStatsTests.java
@@ -34,6 +34,8 @@
 
 import org.opensearch.action.admin.indices.stats.CommonStats;
 import org.opensearch.action.search.SearchRequestStats;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.test.OpenSearchTestCase;
 
@@ -46,7 +48,8 @@
 public class NodeIndicesStatsTests extends OpenSearchTestCase {
 
     public void testInvalidLevel() {
         CommonStats oldStats = new CommonStats();
-        SearchRequestStats requestStats = new SearchRequestStats();
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        SearchRequestStats requestStats = new SearchRequestStats(clusterSettings);
         final NodeIndicesStats stats = new NodeIndicesStats(oldStats, Collections.emptyMap(), requestStats);
         final String level = randomAlphaOfLength(16);
         final ToXContent.Params params = new ToXContent.MapParams(Collections.singletonMap("level", level));
diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java
index 9fe1f8294fc74..9bb1f51c51cf6 100644
--- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java
+++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java
@@ -90,7 +90,7 @@
 import org.opensearch.action.search.SearchExecutionStatsCollector;
 import org.opensearch.action.search.SearchPhaseController;
 import org.opensearch.action.search.SearchRequest;
-import org.opensearch.action.search.SearchRequestSlowLog;
+import org.opensearch.action.search.SearchRequestOperationsCompositeListenerFactory;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.action.search.SearchTransportService;
 import org.opensearch.action.search.TransportSearchAction;
@@ -2285,6 +2285,8 @@ public void onFailure(final Exception e) {
             writableRegistry(),
             searchService::aggReduceContextBuilder
         );
+        SearchRequestOperationsCompositeListenerFactory searchRequestOperationsCompositeListenerFactory =
+            new SearchRequestOperationsCompositeListenerFactory();
         actions.put(
             SearchAction.INSTANCE,
             new TransportSearchAction(
@@ -2310,9 +2312,8 @@ public void onFailure(final Exception e) {
                     List.of(),
                     client
                 ),
-                null,
-                new SearchRequestSlowLog(clusterService),
-                NoopMetricsRegistry.INSTANCE
+                NoopMetricsRegistry.INSTANCE,
+                searchRequestOperationsCompositeListenerFactory
             )
         );
         actions.put(
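This wiring change is the core of the refactor: TransportSearchAction no longer takes a SearchRequestSlowLog directly; it takes the factory, which owns all statically registered listeners. A speculative sketch of how a custom listener plugs in, using only the package-private hooks shown in the factory tests earlier; the behavior in the comments is invented for illustration:

// Illustrative subclass, shaped like createTestSearchRequestOperationsListener() above.
SearchRequestOperationsListener timingListener = new SearchRequestOperationsListener() {
    @Override
    void onPhaseStart(SearchPhaseContext context) {
        // e.g. record the start time of context.getCurrentPhase()
    }

    @Override
    void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {
        // e.g. emit a per-phase latency metric
    }

    @Override
    void onPhaseFailure(SearchPhaseContext context) {
        // e.g. increment a failure counter
    }
};
SearchRequestOperationsCompositeListenerFactory factory =
    new SearchRequestOperationsCompositeListenerFactory(timingListener);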
From a90b6b64d5fbde8c13b6acca076584e8060c599a Mon Sep 17 00:00:00 2001
From: gaobinlong <gbinlong@amazon.com>
Date: Fri, 12 Jan 2024 05:30:55 +0800
Subject: [PATCH 085/178] Fix noop_update_total metric in indexing stats cannot
 be updated by bulk API (#11485)

Signed-off-by: Gao Binlong <gbinlong@amazon.com>
---
 CHANGELOG.md                                  |  1 +
 .../test/indices.stats/50_noop_update.yml     | 55 +++++++++++++++++++
 .../action/bulk/BulkWithUpdatesIT.java        |  8 +++
 .../action/bulk/TransportShardBulkAction.java |  1 +
 4 files changed, 65 insertions(+)
 create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f64dcf82bd4d..1edb8560357cf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -216,6 +216,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167))
 - Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425))
 - Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238))
+- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485))
 - Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152))
 - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417))
 - Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609))
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml
new file mode 100644
index 0000000000000..dd8c2a2deb721
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml
@@ -0,0 +1,55 @@
+---
+setup:
+
+  - do:
+      indices.create:
+        index: test1
+        wait_for_active_shards: all
+        body:
+          settings:
+            index.number_of_shards: 1
+            index.number_of_replicas: 1
+
+  - do:
+      index:
+        index: test1
+        id: 1
+        body: { "bar": "bar" }
+
+  - do:
+      indices.refresh: {}
+
+# Related issue: https://github.com/opensearch-project/OpenSearch/issues/9857
+---
+"Test noop_update_total metric can be updated by both update API and bulk API":
+  - skip:
+      version: " - 2.99.99" #TODO: change to 2.11.99 after the PR is backported to 2.x branch
+      reason: "fixed in 3.0"
+
+  - do:
+      update:
+        index: test1
+        id: 1
+        body: { "doc": { "bar": "bar" } }
+
+  - do:
+      indices.stats:
+        index: test1
+        metric: indexing
+
+  - match: { indices.test1.primaries.indexing.noop_update_total: 1 }
+  - match: { indices.test1.total.indexing.noop_update_total: 1 }
+
+  - do:
+      bulk:
+        body: |
+          {"update": {"_id": "1", "_index": "test1"}}
+          {"doc": {"bar": "bar"}}
+
+  - do:
+      indices.stats:
+        index: test1
+        metric: indexing
+
+  - match: { indices.test1.primaries.indexing.noop_update_total: 2 }
+  - match: { indices.test1.total.indexing.noop_update_total: 2 }
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
index d7fb632c847d1..e27c0c4786da8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/bulk/BulkWithUpdatesIT.java
@@ -35,6 +35,8 @@
 import org.opensearch.action.DocWriteRequest.OpType;
 import org.opensearch.action.DocWriteResponse;
 import org.opensearch.action.admin.indices.alias.Alias;
+import org.opensearch.action.admin.indices.stats.IndicesStatsRequest;
+import org.opensearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.opensearch.action.delete.DeleteRequest;
 import org.opensearch.action.get.GetResponse;
 import org.opensearch.action.index.IndexRequest;
@@ -738,6 +740,12 @@ public void testNoopUpdate() {
             equalTo(2)
         );
 
+        // test noop_update_total metric in stats changed
+        IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest().indices(indexName).indexing(true);
+        final IndicesStatsResponse indicesStatsResponse = client().admin().indices().stats(indicesStatsRequest).actionGet();
+        assertThat(indicesStatsResponse.getIndex(indexName).getTotal().indexing.getTotal().getNoopUpdateCount(), equalTo(1L));
+        assertThat(indicesStatsResponse.getIndex(indexName).getPrimaries().indexing.getTotal().getNoopUpdateCount(), equalTo(1L));
+
         final BulkItemResponse notFoundUpdate = bulkResponse.getItems()[1];
         assertNotNull(notFoundUpdate.getFailure());
 
diff --git a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
index b8517f53ff294..a7a13afd2597c 100644
--- a/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
+++ b/server/src/main/java/org/opensearch/action/bulk/TransportShardBulkAction.java
@@ -593,6 +593,7 @@ static boolean executeBulkItemRequest(
                 context.setRequestToExecute(updateResult.action());
                 break;
             case NOOP:
+                context.getPrimary().noopUpdate();
                 context.markOperationAsNoOp(updateResult.action());
                 context.markAsCompleted(context.getExecutionResult());
                 return true;
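To see the fixed path end to end: an update whose partial doc matches the stored source resolves to a NOOP result, and with the one-line change above that branch now bumps the shard-level counter for bulk requests too, matching the standalone update API. A sketch only, reusing the test-client calls already present in BulkWithUpdatesIT above; the index name and field values are arbitrary:

// Index a document, then send a bulk update carrying an identical partial doc.
client().prepareIndex("test1").setId("1").setSource("bar", "bar").get();
client().prepareBulk().add(client().prepareUpdate("test1", "1").setDoc("bar", "bar")).get();

// The NOOP branch now calls context.getPrimary().noopUpdate(), so the stat moves.
IndicesStatsRequest statsRequest = new IndicesStatsRequest().indices("test1").indexing(true);
IndicesStatsResponse stats = client().admin().indices().stats(statsRequest).actionGet();
assertThat(stats.getIndex("test1").getTotal().indexing.getTotal().getNoopUpdateCount(), equalTo(1L));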
From 5c82ab885a876d659c9714c3b080488777506027 Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Thu, 11 Jan 2024 17:48:15 -0500
Subject: [PATCH 086/178] Ensure Jackson default maximums introduced in 2.16.0
 do not conflict with OpenSearch settings (#11811)

* Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

* Address code review comments

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

* Address code review comments

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

---------

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
---
 CHANGELOG.md                                  |   1 +
 buildSrc/version.properties                   |   4 +-
 .../licenses/jackson-core-2.16.0.jar.sha1     |   1 -
 .../licenses/jackson-core-2.16.1.jar.sha1     |   1 +
 .../jackson-annotations-2.16.0.jar.sha1       |   1 -
 .../jackson-annotations-2.16.1.jar.sha1       |   1 +
 .../licenses/jackson-databind-2.16.0.jar.sha1 |   1 -
 .../licenses/jackson-databind-2.16.1.jar.sha1 |   1 +
 .../licenses/jackson-core-2.16.0.jar.sha1     |   1 -
 .../licenses/jackson-core-2.16.1.jar.sha1     |   1 +
 .../licenses/jackson-core-2.16.0.jar.sha1     |   1 -
 .../licenses/jackson-core-2.16.1.jar.sha1     |   1 +
 .../jackson-dataformat-cbor-2.16.0.jar.sha1   |   1 -
 .../jackson-dataformat-cbor-2.16.1.jar.sha1   |   1 +
 .../jackson-dataformat-smile-2.16.0.jar.sha1  |   1 -
 .../jackson-dataformat-smile-2.16.1.jar.sha1  |   1 +
 .../jackson-dataformat-yaml-2.16.0.jar.sha1   |   1 -
 .../jackson-dataformat-yaml-2.16.1.jar.sha1   |   1 +
 .../common/xcontent/XContentContraints.java   |  35 +++
 .../common/xcontent/cbor/CborXContent.java    |  17 +-
 .../common/xcontent/json/JsonXContent.java    |  17 +-
 .../common/xcontent/smile/SmileXContent.java  |  17 +-
 .../common/xcontent/yaml/YamlXContent.java    |  17 +-
 .../common/xcontent/XContentParserTests.java  | 232 ++++++++++++++++++
 .../common/xcontent/depth-off-limit.cbor.gz   | Bin 0 -> 1888 bytes
 .../common/xcontent/depth-off-limit.json.gz   | Bin 0 -> 2045 bytes
 .../common/xcontent/depth-off-limit.smile.gz  | Bin 0 -> 1906 bytes
 .../common/xcontent/depth-off-limit.yaml.gz   | Bin 0 -> 5950 bytes
 .../jackson-annotations-2.16.0.jar.sha1       |   1 -
 .../jackson-annotations-2.16.1.jar.sha1       |   1 +
 .../licenses/jackson-databind-2.16.0.jar.sha1 |   1 -
 .../licenses/jackson-databind-2.16.1.jar.sha1 |   1 +
 .../jackson-annotations-2.16.0.jar.sha1       |   1 -
 .../jackson-annotations-2.16.1.jar.sha1       |   1 +
 .../licenses/jackson-databind-2.16.0.jar.sha1 |   1 -
 .../licenses/jackson-databind-2.16.1.jar.sha1 |   1 +
 .../jackson-annotations-2.16.0.jar.sha1       |   1 -
 .../jackson-annotations-2.16.1.jar.sha1       |   1 +
 .../licenses/jackson-databind-2.16.0.jar.sha1 |   1 -
 .../licenses/jackson-databind-2.16.1.jar.sha1 |   1 +
 .../jackson-annotations-2.16.0.jar.sha1       |   1 -
 .../jackson-annotations-2.16.1.jar.sha1       |   1 +
 .../licenses/jackson-databind-2.16.0.jar.sha1 |   1 -
 .../licenses/jackson-databind-2.16.1.jar.sha1 |   1 +
 .../jackson-dataformat-xml-2.16.0.jar.sha1    |   1 -
 .../jackson-dataformat-xml-2.16.1.jar.sha1    |   1 +
 .../jackson-datatype-jsr310-2.16.0.jar.sha1   |   1 -
 .../jackson-datatype-jsr310-2.16.1.jar.sha1   |   1 +
 ...on-module-jaxb-annotations-2.16.0.jar.sha1 |   1 -
 ...on-module-jaxb-annotations-2.16.1.jar.sha1 |   1 +
 .../jackson-annotations-2.16.0.jar.sha1       |   1 -
 .../jackson-annotations-2.16.1.jar.sha1       |   1 +
 .../licenses/jackson-databind-2.16.0.jar.sha1 |   1 -
 .../licenses/jackson-databind-2.16.1.jar.sha1 |   1 +
 .../index/mapper/MapperService.java           |  35 ++-
 .../index/mapper/MapperServiceTests.java      |  42 ++++
 56 files changed, 411 insertions(+), 48 deletions(-)
 delete mode 100644 client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
 create mode 100644 client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
 delete mode 100644 distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
 create mode 100644 distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
 delete mode 100644 distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
 create mode 100644 distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
 delete mode 100644 libs/core/licenses/jackson-core-2.16.0.jar.sha1
 create mode 100644 libs/core/licenses/jackson-core-2.16.1.jar.sha1
 delete mode 100644 libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
 create mode 100644 libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
 delete mode 100644 libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
 create mode 100644 libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
 delete mode 100644 libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1
 create mode 100644 libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1
 delete mode 100644 libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1
 create mode 100644 libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1
 create mode 100644 libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java
 create mode 100644 libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz
 create mode 100644 libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz
 create mode 100644 libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz
 create mode 100644 libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz
 delete mode 100644 modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1
 create mode 100644 modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1
 delete mode 100644 modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1
 create mode 100644 modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1
 delete mode 100644 plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1
 create mode 100644 plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1
 delete mode 100644 plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1
 create mode 100644 plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1
 delete mode 100644 plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1
 create mode 100644 plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1
 delete mode 100644 plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1
 create mode 100644 plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1edb8560357cf..10338f6646053 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -194,6 +194,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Capture information for additional query types and aggregation types ([#11582](https://github.com/opensearch-project/OpenSearch/pull/11582))
 - Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732))
 - Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526))
+- Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890))
 
 ### Deprecated
 
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index eceb08c05953d..3813750507f18 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -7,8 +7,8 @@ bundled_jdk = 21.0.1+12
 # optional dependencies
 spatial4j = 0.7
 jts = 1.15.0
-jackson = 2.16.0
-jackson_databind = 2.16.0
+jackson = 2.16.1
+jackson_databind = 2.16.1
 snakeyaml = 2.1
 icu4j = 70.1
 supercsv = 2.4.0
diff --git a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/client/sniffer/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1 b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/client/sniffer/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
deleted file mode 100644
index 79ed9e0c63fc8..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-dc30995f7428c0a405eba9b8c619b20d2b3b9905
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..cbc65687606fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-annotations-2.16.1.jar.sha1
@@ -0,0 +1 @@
+fd441d574a71e7d10a4f73de6609f881d8cdfeec
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
deleted file mode 100644
index da00d281934b1..0000000000000
--- a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7
\ No newline at end of file
diff --git a/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1 b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..d231db4fd49fc
--- /dev/null
+++ b/distribution/tools/upgrade-cli/licenses/jackson-databind-2.16.1.jar.sha1
@@ -0,0 +1 @@
+02a16efeb840c45af1e2f31753dfe76795278b73
\ No newline at end of file
diff --git a/libs/core/licenses/jackson-core-2.16.0.jar.sha1 b/libs/core/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/core/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/core/licenses/jackson-core-2.16.1.jar.sha1 b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/core/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
deleted file mode 100644
index c2b70fb4ae202..0000000000000
--- a/libs/x-content/licenses/jackson-core-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-899e5cf01be55fbf094ad72b2edb0c5df99111ee
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..908d071b34a2a
--- /dev/null
+++ b/libs/x-content/licenses/jackson-core-2.16.1.jar.sha1
@@ -0,0 +1 @@
+9456bb3cdd0f79f91a5f730a1b1bb041a380c91f
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
deleted file mode 100644
index 8da478fc6013d..0000000000000
--- a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-35e8b7bf4fc1d078766bb155103d433ed5bb1627
\ No newline at end of file
diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1
new file mode 100644
index 0000000000000..b4b781f604910
--- /dev/null
b/libs/x-content/licenses/jackson-dataformat-cbor-2.16.1.jar.sha1 @@ -0,0 +1 @@ +1be7098dccc079171464dca7e386bd8df623b031 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 deleted file mode 100644 index 3e952ffe92418..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-smile-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3c422d7f3901c9a1becf9df3cf41efc68a5ab95c \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..ad91e748ebe94 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-smile-2.16.1.jar.sha1 @@ -0,0 +1 @@ +c4ddbc5277670f2e56b1f5e44e83afa748bcb125 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 deleted file mode 100644 index d62b5874ab023..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2033e2c5f531785d17f3a2bc31842e3bbb7983b2 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..9b30e7bf921b2 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.16.1.jar.sha1 @@ -0,0 +1 @@ +8e4f1923d73cd55f2b4c0d56ee4ed80419297354 \ No newline at end of file diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java new file mode 100644 index 0000000000000..4c05f0058f2ed --- /dev/null +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/XContentContraints.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.xcontent; + +import com.fasterxml.jackson.core.StreamReadConstraints; + +import org.opensearch.common.annotation.InternalApi; + +/** + * Consolidates the XContent constraints (primarily reflecting Jackson's {@link StreamReadConstraints} constraints) + * + * @opensearch.internal + */ +@InternalApi +public interface XContentContraints { + final String DEFAULT_MAX_STRING_LEN_PROPERTY = "opensearch.xcontent.string.length.max"; + final String DEFAULT_MAX_NAME_LEN_PROPERTY = "opensearch.xcontent.name.length.max"; + final String DEFAULT_MAX_DEPTH_PROPERTY = "opensearch.xcontent.depth.max"; + + final int DEFAULT_MAX_STRING_LEN = Integer.parseInt(System.getProperty(DEFAULT_MAX_STRING_LEN_PROPERTY, "50000000" /* ~50 Mb */)); + + final int DEFAULT_MAX_NAME_LEN = Integer.parseInt( + System.getProperty(DEFAULT_MAX_NAME_LEN_PROPERTY, "50000" /* StreamReadConstraints.DEFAULT_MAX_NAME_LEN */) + ); + + final int DEFAULT_MAX_DEPTH = Integer.parseInt( + System.getProperty(DEFAULT_MAX_DEPTH_PROPERTY, "1000" /* StreamReadConstraints.DEFAULT_MAX_DEPTH */) + ); +} diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java index 81f8fe9b6366f..7e92f236213d4 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/cbor/CborXContent.java @@ -37,8 +37,10 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -58,11 +60,7 @@ /** * A CBOR based content implementation using Jackson. 
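
The new XContentContraints interface funnels all three per-JVM caps through system properties that are read once at class-initialization time, so they can only be changed at JVM startup. A minimal, self-contained sketch of that resolution (the property names and default values are the real ones from the interface; the probe class itself is hypothetical):

    // Hypothetical probe: resolves the same three caps XContentContraints defines,
    // using the identical property names and defaults. Launch the JVM with e.g.
    // -Dopensearch.xcontent.depth.max=2000 to raise the depth cap at startup.
    public final class XContentLimitsProbe {
        public static void main(String[] args) {
            print("opensearch.xcontent.string.length.max", "50000000"); // ~50 MB
            print("opensearch.xcontent.name.length.max", "50000");
            print("opensearch.xcontent.depth.max", "1000");
        }

        private static void print(String property, String defaultValue) {
            // Same resolution logic as the interface's static fields
            int effective = Integer.parseInt(System.getProperty(property, defaultValue));
            System.out.println(property + " = " + effective);
        }
    }

Because the interface reads these values into static final fields, raising a related index-level setting past them at runtime cannot work; that is exactly what the MapperService validators later in this patch guard against.
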
*/ -public class CborXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class CborXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(cborXContent); } @@ -76,7 +74,14 @@ public static XContentBuilder contentBuilder() throws IOException { // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - cborFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + cborFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + cborFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); cborFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); cborXContent = new CborXContent(); } diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java index 4bd7c4c99bb46..91f6bbeb4f786 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/json/JsonXContent.java @@ -38,7 +38,9 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -57,11 +59,7 @@ /** * A JSON based content implementation using Jackson. 
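
The CborXContent change above and the JsonXContent, SmileXContent, and YamlXContent changes below all apply one wiring pattern: each factory now receives the full set of read constraints plus a writer-side nesting cap, which Jackson introduced in 2.16 as StreamWriteConstraints. Isolated from the X-Content classes, the pattern looks like this (a sketch against plain Jackson 2.16; the class name is hypothetical and the constants mirror the defaults above):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.StreamReadConstraints;
    import com.fasterxml.jackson.core.StreamWriteConstraints;

    public final class ConstrainedFactory {
        // Sketch of the read/write constraints each X-Content factory applies,
        // shown here on a plain JSON factory.
        static JsonFactory newConstrainedFactory() {
            JsonFactory factory = new JsonFactory();
            factory.setStreamReadConstraints(
                StreamReadConstraints.builder()
                    .maxStringLength(50_000_000) // DEFAULT_MAX_STRING_LEN
                    .maxNameLength(50_000)       // DEFAULT_MAX_NAME_LEN
                    .maxNestingDepth(1_000)      // DEFAULT_MAX_DEPTH
                    .build()
            );
            // Writer-side nesting cap, available since Jackson 2.16:
            factory.setStreamWriteConstraints(
                StreamWriteConstraints.builder().maxNestingDepth(1_000).build()
            );
            return factory;
        }
    }
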
*/ -public class JsonXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class JsonXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(jsonXContent); } @@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException { // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - jsonFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + jsonFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + jsonFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); jsonFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); jsonXContent = new JsonXContent(); } diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java index e824d4e1ae991..c73e126102a80 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/smile/SmileXContent.java @@ -37,9 +37,11 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -58,11 +60,7 @@ /** * A Smile based content implementation using Jackson. 
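
When input exceeds one of these caps, Jackson fails fast with a StreamConstraintsException instead of buffering unbounded data, which is what the new parser tests later in this commit assert. A tiny standalone demonstration (plain Jackson; the demo class is hypothetical and the cap of 3 is deliberately small for illustration):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.core.StreamReadConstraints;
    import com.fasterxml.jackson.core.exc.StreamConstraintsException;

    public final class ReadDepthCapDemo {
        public static void main(String[] args) throws Exception {
            JsonFactory factory = new JsonFactory();
            factory.setStreamReadConstraints(StreamReadConstraints.builder().maxNestingDepth(3).build());
            // Four nested objects, one more than the cap allows:
            String json = "{\"a\":{\"b\":{\"c\":{\"d\":1}}}}";
            try (JsonParser parser = factory.createParser(json)) {
                while (parser.nextToken() != null) {
                    // drain tokens; the fourth START_OBJECT exceeds the cap and throws
                }
            } catch (StreamConstraintsException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }
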
*/ -public class SmileXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class SmileXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(smileXContent); } @@ -78,7 +76,14 @@ public static XContentBuilder contentBuilder() throws IOException { // Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false); smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - smileFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + smileFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + smileFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); smileFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); smileXContent = new SmileXContent(); } diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java index 0ad3c44e0168a..3f6a4b3aeead7 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/yaml/YamlXContent.java @@ -36,8 +36,10 @@ import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.core.StreamReadFeature; +import com.fasterxml.jackson.core.StreamWriteConstraints; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.xcontent.DeprecationHandler; import org.opensearch.core.xcontent.MediaType; @@ -56,11 +58,7 @@ /** * A YAML based content implementation using Jackson. 
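
The writer-side cap is enforced at a slightly awkward moment: as testWriteDepthOffLimit later in this commit documents, the generator first records the new object in its internal state and only then throws. Reduced to plain Jackson (hypothetical demo class, tiny cap for illustration only):

    import java.io.StringWriter;

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonGenerator;
    import com.fasterxml.jackson.core.StreamWriteConstraints;
    import com.fasterxml.jackson.core.exc.StreamConstraintsException;

    public final class WriteDepthCapDemo {
        public static void main(String[] args) throws Exception {
            JsonFactory factory = new JsonFactory();
            factory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(3).build());
            StringWriter out = new StringWriter();
            try (JsonGenerator generator = factory.createGenerator(out)) {
                generator.writeStartObject();  // depth 1
                generator.writeFieldName("a");
                generator.writeStartObject();  // depth 2
                generator.writeFieldName("b");
                generator.writeStartObject();  // depth 3
                generator.writeFieldName("c");
                generator.writeStartObject();  // depth 4: exceeds the cap and throws
            } catch (StreamConstraintsException e) {
                System.out.println("write rejected: " + e.getMessage());
            }
        }
    }
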
*/ -public class YamlXContent implements XContent { - public static final int DEFAULT_MAX_STRING_LEN = Integer.parseInt( - System.getProperty("opensearch.xcontent.string.length.max", "50000000" /* ~50 Mb */) - ); - +public class YamlXContent implements XContent, XContentContraints { public static XContentBuilder contentBuilder() throws IOException { return XContentBuilder.builder(yamlXContent); } @@ -71,7 +69,14 @@ public static XContentBuilder contentBuilder() throws IOException { static { yamlFactory = new YAMLFactory(); yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true); - yamlFactory.setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(DEFAULT_MAX_STRING_LEN).build()); + yamlFactory.setStreamWriteConstraints(StreamWriteConstraints.builder().maxNestingDepth(DEFAULT_MAX_DEPTH).build()); + yamlFactory.setStreamReadConstraints( + StreamReadConstraints.builder() + .maxStringLength(DEFAULT_MAX_STRING_LEN) + .maxNameLength(DEFAULT_MAX_NAME_LEN) + .maxNestingDepth(DEFAULT_MAX_DEPTH) + .build() + ); yamlFactory.configure(StreamReadFeature.USE_FAST_DOUBLE_PARSER.mappedFeature(), true); yamlXContent = new YamlXContent(); } diff --git a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java index d3d9ea174cf1b..0e431d8ea4277 100644 --- a/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java +++ b/libs/x-content/src/test/java/org/opensearch/common/xcontent/XContentParserTests.java @@ -40,6 +40,7 @@ import org.opensearch.common.xcontent.cbor.CborXContent; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.common.xcontent.smile.SmileXContent; +import org.opensearch.common.xcontent.yaml.YamlXContent; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.core.xcontent.XContentParseException; @@ -48,16 +49,20 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; +import java.io.InputStream; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.function.Supplier; +import java.util.zip.GZIPInputStream; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; +import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -67,6 +72,7 @@ import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assume.assumeThat; import static org.junit.internal.matchers.ThrowableMessageMatcher.hasMessage; public class XContentParserTests extends OpenSearchTestCase { @@ -94,6 +100,50 @@ public class XContentParserTests extends OpenSearchTestCase { () -> randomRealisticUnicodeOfCodepointLength(3145730) ); + private static final Map<XContentType, Supplier<String>> FIELD_NAME_GENERATORS = Map.of( + XContentType.JSON, + () -> randomAlphaOfLengthBetween(1, JsonXContent.DEFAULT_MAX_NAME_LEN), + XContentType.CBOR, + () -> randomAlphaOfLengthBetween(1, CborXContent.DEFAULT_MAX_NAME_LEN), + XContentType.SMILE, + () -> randomAlphaOfLengthBetween(1, SmileXContent.DEFAULT_MAX_NAME_LEN), + 
XContentType.YAML, + () -> randomAlphaOfLengthBetween(1, YamlXContent.DEFAULT_MAX_NAME_LEN) + ); + + private static final Map<XContentType, Supplier<String>> FIELD_NAME_OFF_LIMIT_GENERATORS = Map.of( + XContentType.JSON, + () -> randomAlphaOfLength(JsonXContent.DEFAULT_MAX_NAME_LEN + 1), + XContentType.CBOR, + () -> randomAlphaOfLength(CborXContent.DEFAULT_MAX_NAME_LEN + 1), + XContentType.SMILE, + () -> randomAlphaOfLength(SmileXContent.DEFAULT_MAX_NAME_LEN + 1), + XContentType.YAML, + () -> randomAlphaOfLength(YamlXContent.DEFAULT_MAX_NAME_LEN + 1) + ); + + private static final Map<XContentType, Supplier<Integer>> DEPTH_GENERATORS = Map.of( + XContentType.JSON, + () -> randomIntBetween(1, JsonXContent.DEFAULT_MAX_DEPTH), + XContentType.CBOR, + () -> randomIntBetween(1, CborXContent.DEFAULT_MAX_DEPTH), + XContentType.SMILE, + () -> randomIntBetween(1, SmileXContent.DEFAULT_MAX_DEPTH), + XContentType.YAML, + () -> randomIntBetween(1, YamlXContent.DEFAULT_MAX_DEPTH) + ); + + private static final Map<XContentType, Supplier<Integer>> OFF_LIMIT_DEPTH_GENERATORS = Map.of( + XContentType.JSON, + () -> JsonXContent.DEFAULT_MAX_DEPTH + 1, + XContentType.CBOR, + () -> CborXContent.DEFAULT_MAX_DEPTH + 1, + XContentType.SMILE, + () -> SmileXContent.DEFAULT_MAX_DEPTH + 1, + XContentType.YAML, + () -> YamlXContent.DEFAULT_MAX_DEPTH + 1 + ); + public void testStringOffLimit() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); @@ -155,6 +205,188 @@ public void testString() throws IOException { } } + public void testFieldNameOffLimit() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final String field = FIELD_NAME_OFF_LIMIT_GENERATORS.get(xContentType).get(); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + builder.startObject(); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + builder.endObject(); + + try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + // See please https://github.com/FasterXML/jackson-dataformats-binary/issues/392, support + // for CBOR, Smile is coming + if (xContentType != XContentType.JSON) { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field, parser.currentName()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } else { + assertThrows(StreamConstraintsException.class, () -> parser.nextToken()); + } + } + } + } + + public void testFieldName() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final String field = FIELD_NAME_GENERATORS.get(xContentType).get(); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + builder.startObject(); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + builder.endObject(); + + try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field, 
parser.currentName()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } + } + } + + public void testWriteDepthOffLimit() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + // Branching off YAML logic into separate test case testWriteDepthOffLimitYaml since it behaves differently + assumeThat(xContentType, not(XContentType.YAML)); + + final String field = randomAlphaOfLengthBetween(1, 5); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1; + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.startObject(); + builder.field(field + depth); + } + + // The behavior here is very interesting: the generator does write the new object tag (changing the internal state) + // BUT throws the exception after the fact, this is why we have to close the object at the end. + assertThrows(StreamConstraintsException.class, () -> builder.startObject()); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + + builder.endObject(); + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.endObject(); + } + } + } + + public void testWriteDepthOffLimitYaml() throws IOException { + final String field = randomAlphaOfLengthBetween(1, 5); + try (XContentBuilder builder = XContentBuilder.builder(XContentType.YAML.xContent())) { + final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(XContentType.YAML).get() - 1; + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.startObject(); + builder.field(field + depth); + } + + // The behavior here is very interesting: the generator does write the new object tag (changing the internal state) + // BUT throws the exception after the fact, this is why we have to close the object at the end. + assertThrows(StreamConstraintsException.class, () -> builder.startObject()); + } catch (final IllegalStateException ex) { + // YAML parser is having really hard time recovering from StreamConstraintsException, the internal + // state seems to be completely messed up and the closing cleanly seems to be not feasible. + } + } + + public void testReadDepthOffLimit() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + final int maxDepth = OFF_LIMIT_DEPTH_GENERATORS.get(xContentType).get() - 1; + + // Since parser and generator use the same max depth constraints, we could not generate the content with off limits, + // using precreated test files instead. + try ( + InputStream in = new GZIPInputStream( + getDataInputStream("depth-off-limit." 
+ xContentType.name().toLowerCase(Locale.US) + ".gz") + ) + ) { + try (XContentParser parser = createParser(xContentType.xContent(), in)) { + for (int depth = 0; depth < maxDepth; ++depth) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + } + + if (xContentType != XContentType.YAML) { + assertThrows(StreamConstraintsException.class, () -> parser.nextToken()); + } + } + } + } + + public void testDepth() throws IOException { + final XContentType xContentType = randomFrom(XContentType.values()); + + final String field = randomAlphaOfLengthBetween(1, 5); + final String value = randomAlphaOfLengthBetween(1, 5); + + try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) { + final int maxDepth = DEPTH_GENERATORS.get(xContentType).get() - 1; + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.startObject(); + builder.field(field + depth); + } + + builder.startObject(); + if (randomBoolean()) { + builder.field(field, value); + } else { + builder.field(field).value(value); + } + builder.endObject(); + + for (int depth = 0; depth < maxDepth; ++depth) { + builder.endObject(); + } + + try (XContentParser parser = createParser(xContentType.xContent(), BytesReference.bytes(builder))) { + for (int depth = 0; depth < maxDepth; ++depth) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field + depth, parser.currentName()); + } + + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); + assertEquals(field, parser.currentName()); + assertEquals(XContentParser.Token.VALUE_STRING, parser.nextToken()); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + + for (int depth = 0; depth < maxDepth; ++depth) { + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + } + + assertNull(parser.nextToken()); + } + } + } + public void testFloat() throws IOException { final XContentType xContentType = randomFrom(XContentType.values()); diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.cbor.gz new file mode 100644 index 0000000000000000000000000000000000000000..88de7e590e7f0e785fb1462c78ddac6aed50e9df GIT binary patch literal 1888 zcmb2|=HS>^IX9JwIVH8ABtthpEloEkGdHtDFF7f{h~e$!-GV8{1zjDdm%X}vRmtQ6 z4~vJENs+H|<I62g8+ETty*DxL-=F92|NlJRKfnI|`R~WRKl^^|`?v4szR!Dq_x)b` z|6jj{+uy6XcVu==aQ?5edo1(6on3td3w*K~6JA-33BRnyglAS`!a3oV>x<%^?m1OC z^|RLV&~vLn0-LbFqWGu#PSsBRt@S<h-Re8xJJ$mR?#=)I?_a6?o8xW1(c9kMNWT86 z_KRJW{NDb1?(Y|WzxewFh`Uf61+AQ3g^ZSlS0SUd*Q=1xV)H6w<o^DJ?B2!YU;6G{ zw5|ej*4}%CjAHk$F8{jj-qqbz;qPC?+S)bsKT!U`F=VJo{ZEvCde{vWs;mFe;vX7z zLxq~v|7`Kk47;I1we>$t{vlyER46|A_Wr}iA3pw2_`{+`#%?HalYd6oP3wO;RH(<F z3V-gXiLuj@pWc7^_|qZ8y|cgO&(G`sKHjdc-y8q_`tSes|Cy7zUV0o-VqgFOx*s+M literal 0 HcmV?d00001 diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..76274910542ac93a3802d0a548e1f60dad2d8ff3 GIT binary patch literal 2045 zcmZXUe^8Ql9LF`iXKlmk&RqlDa9cYo%P`1vscG}h?q(~uoODfuC7@OcWebF`xo5jt z>Zp{|Qt~p>%Zi$H>fn*F&Q*vSIU|<Avz$qS;4dsiaUeh9k^BDfeeV7Jyk76mUr$K! 
[remaining base85 payload of depth-off-limit.json.gz omitted]
literal 0 HcmV?d00001 diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.smile.gz new file mode 100644 index 0000000000000000000000000000000000000000..e248778b372539560625af3aba6b97a801a4094b GIT binary patch literal 1906
[base85 payload of depth-off-limit.smile.gz omitted]
literal 0 HcmV?d00001 diff --git a/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz b/libs/x-content/src/test/resources/org/opensearch/common/xcontent/depth-off-limit.yaml.gz new file mode 100644 index 0000000000000000000000000000000000000000..3b36594482a6818c5d2f230aa862c21297f0cac8 GIT binary patch literal 5950
[base85 payload of depth-off-limit.yaml.gz omitted]
literal 0 HcmV?d00001 diff --git
a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/modules/ingest-geoip/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/modules/ingest-geoip/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/crypto-kms/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/plugins/crypto-kms/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/crypto-kms/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/crypto-kms/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ 
b/plugins/discovery-ec2/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/repository-azure/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/repository-azure/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1 deleted file mode 100644 index f0d165ff7cf82..0000000000000 --- a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f3cdb002e0f2f30ad9c5fd053d78b1a485511ab1 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..ad4e055d4f19a --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-dataformat-xml-2.16.1.jar.sha1 @@ -0,0 +1 @@ +d952ad30d3f2d1220f39db175618414b56d14638 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1 deleted file mode 100644 index 40379694f5ea5..0000000000000 --- a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-77e3a27823f795d928b897d8444744ddb044a5c3 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..4309dad93b2b6 --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-datatype-jsr310-2.16.1.jar.sha1 @@ -0,0 +1 @@ +36a418325c618e440e5ccb80b75c705d894f50bd \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 820d14b3df8e4..0000000000000 --- a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -684daae9ea45087c670b4f6511edcfdb19c3a695 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1 b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..5f54d0ac554e0 --- /dev/null +++ b/plugins/repository-azure/licenses/jackson-module-jaxb-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +e9df364a2695e66eb8d2803d6725424842760125 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1 deleted file mode 100644 index 79ed9e0c63fc8..0000000000000 --- a/plugins/repository-s3/licenses/jackson-annotations-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc30995f7428c0a405eba9b8c619b20d2b3b9905 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..cbc65687606fc --- /dev/null +++ b/plugins/repository-s3/licenses/jackson-annotations-2.16.1.jar.sha1 @@ -0,0 +1 @@ +fd441d574a71e7d10a4f73de6609f881d8cdfeec \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1 deleted file mode 100644 index da00d281934b1..0000000000000 --- a/plugins/repository-s3/licenses/jackson-databind-2.16.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a6b7f8ff7b30d518bbd65678e9c30cd881f19a7 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1 new file mode 100644 index 0000000000000..d231db4fd49fc --- /dev/null +++ b/plugins/repository-s3/licenses/jackson-databind-2.16.1.jar.sha1 @@ -0,0 +1 @@ +02a16efeb840c45af1e2f31753dfe76795278b73 \ No newline at end of file diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index 8ebb007787828..9b8fa7eec37b9 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -46,6 +46,7 @@ import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.Assertions; @@ -149,13 
+150,45 @@ public enum MergeReason { "index.mapping.depth.limit", 20L, 1, + Long.MAX_VALUE, + limit -> { + // Make sure XContent constraints are not exceeded (otherwise content processing will fail) + if (limit > XContentContraints.DEFAULT_MAX_DEPTH) { + throw new IllegalArgumentException( + "The provided value " + + limit + + " of the index setting 'index.mapping.depth.limit' exceeds per-JVM configured limit of " + + XContentContraints.DEFAULT_MAX_DEPTH + + ". Please change the setting value or increase per-JVM limit " + + "using '" + + XContentContraints.DEFAULT_MAX_DEPTH_PROPERTY + + "' system property." + ); + } + }, Property.Dynamic, Property.IndexScope ); public static final Setting<Long> INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING = Setting.longSetting( "index.mapping.field_name_length.limit", - Long.MAX_VALUE, + 50000, 1L, + Long.MAX_VALUE, + limit -> { + // Make sure XContent constraints are not exceeded (otherwise content processing will fail) + if (limit > XContentContraints.DEFAULT_MAX_NAME_LEN) { + throw new IllegalArgumentException( + "The provided value " + + limit + + " of the index setting 'index.mapping.field_name_length.limit' exceeds per-JVM configured limit of " + + XContentContraints.DEFAULT_MAX_NAME_LEN + + ". Please change the setting value or increase per-JVM limit " + + "using '" + + XContentContraints.DEFAULT_MAX_NAME_LEN_PROPERTY + + "' system property." + ); + } + }, Property.Dynamic, Property.IndexScope ); diff --git a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java index f0f34dff0a38f..bb3f2be8ea748 100644 --- a/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/MapperServiceTests.java @@ -36,6 +36,7 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.compress.CompressedXContent; import org.opensearch.common.settings.Settings; +import org.opensearch.common.xcontent.XContentContraints; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; @@ -65,6 +66,7 @@ import java.util.Map; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -158,6 +160,26 @@ public void testMappingDepthExceedsLimit() throws Throwable { assertThat(e.getMessage(), containsString("Limit of mapping depth [1] has been exceeded")); } + public void testMappingDepthExceedsXContentLimit() throws Throwable { + final IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> createIndex( + "test1", + Settings.builder() + .put(MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING.getKey(), XContentContraints.DEFAULT_MAX_DEPTH + 1) + .build() + ) + ); + + assertThat( + ex.getMessage(), + is( + "The provided value 1001 of the index setting 'index.mapping.depth.limit' exceeds per-JVM configured limit of 1000. " + + "Please change the setting value or increase per-JVM limit using 'opensearch.xcontent.depth.max' system property." 
+ ) + ); + } + public void testUnmappedFieldType() { MapperService mapperService = createIndex("index").mapperService(); assertThat(mapperService.unmappedFieldType("keyword"), instanceOf(KeywordFieldType.class)); @@ -300,6 +322,26 @@ public void testTotalFieldsLimitWithFieldAlias() throws Throwable { assertEquals("Limit of total fields [" + numberOfNonAliasFields + "] has been exceeded", e.getMessage()); } + public void testFieldNameLengthExceedsXContentLimit() throws Throwable { + final IllegalArgumentException ex = assertThrows( + IllegalArgumentException.class, + () -> createIndex( + "test1", + Settings.builder() + .put(MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING.getKey(), XContentContraints.DEFAULT_MAX_NAME_LEN + 1) + .build() + ) + ); + + assertThat( + ex.getMessage(), + is( + "The provided value 50001 of the index setting 'index.mapping.field_name_length.limit' exceeds per-JVM configured limit of 50000. " + + "Please change the setting value or increase per-JVM limit using 'opensearch.xcontent.name.length.max' system property." + ) + ); + } + public void testFieldNameLengthLimit() throws Throwable { int maxFieldNameLength = randomIntBetween(25, 30); String testString = new String(new char[maxFieldNameLength + 1]).replace("\0", "a"); From c8ae7f05a7823c480c5e9b2dccae5ca26ad0908b Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Fri, 12 Jan 2024 16:50:30 -0600 Subject: [PATCH 087/178] Fix log appender in InsecureSettingTests (#11866) The appender was being added to the logger before the appender was started. This can lead to logger errors with concurrently running tests because the logger is static state shared across the JVM. See #10799. I've also added a removeAppender call that was missing from LoggersTests, but that is mostly hygiene and would not lead to failures. 
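
The resulting pattern, reduced to its essentials (a sketch only; the helper shown here is illustrative and not part of the change):

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.apache.logging.log4j.core.Appender;
    import org.opensearch.common.logging.Loggers;

    final class AppenderLifecycle {
        // Start the appender before wiring it to the logger, and unwire it
        // before stopping, so no event ever reaches a stopped appender.
        static void withAppender(Appender appender, Runnable test) {
            Logger logger = LogManager.getLogger(AppenderLifecycle.class);
            appender.start();
            Loggers.addAppender(logger, appender);
            try {
                test.run();
            } finally {
                Loggers.removeAppender(logger, appender);
                appender.stop();
            }
        }
    }
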
Signed-off-by: Andrew Ross <andrross@amazon.com> --- .../common/logging/LoggersTests.java | 63 ++++++++++--------- .../common/settings/InsecureSettingTests.java | 7 ++- 2 files changed, 38 insertions(+), 32 deletions(-) diff --git a/server/src/test/java/org/opensearch/common/logging/LoggersTests.java b/server/src/test/java/org/opensearch/common/logging/LoggersTests.java index 17c4f9d0fe13d..d9db57aef15b6 100644 --- a/server/src/test/java/org/opensearch/common/logging/LoggersTests.java +++ b/server/src/test/java/org/opensearch/common/logging/LoggersTests.java @@ -53,40 +53,45 @@ public void testParameterizedMessageLambda() throws Exception { appender.start(); final Logger testLogger = LogManager.getLogger(LoggersTests.class); Loggers.addAppender(testLogger, appender); - Loggers.setLevel(testLogger, Level.TRACE); + try { + Loggers.setLevel(testLogger, Level.TRACE); - Throwable ex = randomException(); - testLogger.error(() -> new ParameterizedMessage("an error message"), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.ERROR)); - assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an error message")); + Throwable ex = randomException(); + testLogger.error(() -> new ParameterizedMessage("an error message"), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.ERROR)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an error message")); - ex = randomException(); - testLogger.warn(() -> new ParameterizedMessage("a warn message: [{}]", "long gc"), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.WARN)); - assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a warn message: [long gc]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining("long gc")); + ex = randomException(); + testLogger.warn(() -> new ParameterizedMessage("a warn message: [{}]", "long gc"), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.WARN)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a warn message: [long gc]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining("long gc")); - testLogger.info(() -> new ParameterizedMessage("an info message a=[{}], b=[{}], c=[{}]", 1, 2, 3)); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.INFO)); - assertThat(appender.lastEvent.getThrown(), nullValue()); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an info message a=[1], b=[2], c=[3]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(1, 2, 3)); + testLogger.info(() -> new ParameterizedMessage("an info message a=[{}], b=[{}], c=[{}]", 1, 2, 3)); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.INFO)); + assertThat(appender.lastEvent.getThrown(), nullValue()); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("an info message a=[1], b=[2], c=[3]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(1, 2, 3)); - ex = randomException(); - testLogger.debug(() -> new ParameterizedMessage("a debug message options = {}", Arrays.asList("yes", "no")), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.DEBUG)); - 
assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a debug message options = [yes, no]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(Arrays.asList("yes", "no"))); + ex = randomException(); + testLogger.debug(() -> new ParameterizedMessage("a debug message options = {}", Arrays.asList("yes", "no")), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.DEBUG)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a debug message options = [yes, no]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(Arrays.asList("yes", "no"))); - ex = randomException(); - testLogger.trace(() -> new ParameterizedMessage("a trace message; element = [{}]", new Object[] { null }), ex); - assertThat(appender.lastEvent.getLevel(), equalTo(Level.TRACE)); - assertThat(appender.lastEvent.getThrown(), equalTo(ex)); - assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a trace message; element = [null]")); - assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(new Object[] { null })); + ex = randomException(); + testLogger.trace(() -> new ParameterizedMessage("a trace message; element = [{}]", new Object[] { null }), ex); + assertThat(appender.lastEvent.getLevel(), equalTo(Level.TRACE)); + assertThat(appender.lastEvent.getThrown(), equalTo(ex)); + assertThat(appender.lastParameterizedMessage().getFormattedMessage(), equalTo("a trace message; element = [null]")); + assertThat(appender.lastParameterizedMessage().getParameters(), arrayContaining(new Object[] { null })); + } finally { + Loggers.removeAppender(testLogger, appender); + appender.stop(); + } } private Throwable randomException() { diff --git a/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java b/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java index b256ab956f963..9358013826a1c 100644 --- a/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java +++ b/server/src/test/java/org/opensearch/common/settings/InsecureSettingTests.java @@ -25,7 +25,7 @@ public class InsecureSettingTests extends OpenSearchTestCase { private List<String> rootLogMsgs = new ArrayList<>(); private AbstractAppender rootAppender; - protected void assertSettingWarning() { + private void assertSettingWarning() { assertWarnings( "[setting.name] setting was deprecated in OpenSearch and will be removed in a future release! See the breaking changes documentation for the next major version." 
); @@ -50,13 +50,14 @@ public void append(LogEvent event) { InsecureSettingTests.this.rootLogMsgs.add(message); } }; - Loggers.addAppender(LogManager.getRootLogger(), rootAppender); rootAppender.start(); + Loggers.addAppender(LogManager.getLogger(SecureSetting.class), rootAppender); } @After public void removeInsecureSettingsAppender() { - Loggers.removeAppender(LogManager.getRootLogger(), rootAppender); + Loggers.removeAppender(LogManager.getLogger(SecureSetting.class), rootAppender); + rootAppender.stop(); } public void testShouldRaiseExceptionByDefault() { From 988dea88a0aaae192db602f35f5cd6d714d07fce Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Sat, 13 Jan 2024 22:16:44 +0800 Subject: [PATCH 088/178] Update supported version for must_exist parameter in update aliases API (#11872) * Update supported version for must_exist parameter in update aliases API Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Modify change log Signed-off-by: Gao Binlong <gbinlong@amazon.com> --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 1 + .../40_remove_with_must_exist.yml | 16 ++++++++-------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 10338f6646053..a4d42116fa9af 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -83,6 +83,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Don't over-allocate in HeapBufferedAsyncEntityConsumer in order to consume the response ([#9993](https://github.com/opensearch-project/OpenSearch/pull/9993)) - Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439)) - Fix typo in API annotation check message ([11836](https://github.com/opensearch-project/OpenSearch/pull/11836)) +- Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872)) ### Security diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml index dbf6a4fad3295..b9457f0290897 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/40_remove_with_must_exist.yml @@ -1,8 +1,8 @@ --- "Throw aliases missing exception when removing non-existing alias with setting must_exist to true": - skip: - version: " - 2.99.99" - reason: "introduced in 3.0" + version: " - 2.11.99" + reason: "introduced in 2.12.0" - do: indices.create: @@ -47,8 +47,8 @@ --- "Throw aliases missing exception when all of the specified aliases are non-existing": - skip: - version: " - 2.99.99" - reason: "introduced in 3.0" + version: " - 2.11.99" + reason: "introduced in 2.12.0" - do: indices.create: @@ -81,8 +81,8 @@ --- "Remove successfully when some specified aliases are non-existing": - skip: - version: " - 2.99.99" - reason: "introduced in 3.0" + version: " - 2.11.99" + reason: "introduced in 2.12.0" - do: indices.create: @@ -116,8 +116,8 @@ --- "Remove silently when all of the specified aliases are non-existing and must_exist is false": - skip: - version: " - 2.99.99" - reason: "introduced in 3.0" + version: " - 2.11.99" + reason: "introduced in 2.12.0" - do: indices.create: From ac4aca210f799237922b15c4c55faeccd8cb9f7c Mon Sep 17 00:00:00 2001 
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 09:16:42 -0500 Subject: [PATCH 089/178] Bump lycheeverse/lychee-action from 1.9.0 to 1.9.1 (#11887) * Bump lycheeverse/lychee-action from 1.9.0 to 1.9.1 Bumps [lycheeverse/lychee-action](https://github.com/lycheeverse/lychee-action) from 1.9.0 to 1.9.1. - [Release notes](https://github.com/lycheeverse/lychee-action/releases) - [Commits](https://github.com/lycheeverse/lychee-action/compare/v1.9.0...v1.9.1) --- updated-dependencies: - dependency-name: lycheeverse/lychee-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- .github/workflows/links.yml | 2 +- CHANGELOG.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 2714d45bd108f..61962c91b4903 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -13,7 +13,7 @@ jobs: - uses: actions/checkout@v4 - name: lychee Link Checker id: lychee - uses: lycheeverse/lychee-action@v1.9.0 + uses: lycheeverse/lychee-action@v1.9.1 with: args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes fail: true diff --git a/CHANGELOG.md b/CHANGELOG.md index a4d42116fa9af..8b6840b62ca5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -165,7 +165,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#11691](https://github.com/opensearch-project/OpenSearch/pull/11691)) - Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693)) - Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) -- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.0 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795)) +- Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887)) - Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) ### Changed From a90dd86646d64f76f9e08b11455c522b202d10ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Jan 2024 09:23:18 -0500 Subject: [PATCH 090/178] Bump com.networknt:json-schema-validator from 1.0.86 to 1.1.0 (#11886) * Bump com.networknt:json-schema-validator from 1.0.86 to 1.1.0 Bumps [com.networknt:json-schema-validator](https://github.com/networknt/json-schema-validator) from 1.0.86 to 1.1.0. - [Release notes](https://github.com/networknt/json-schema-validator/releases) - [Changelog](https://github.com/networknt/json-schema-validator/blob/master/CHANGELOG.md) - [Commits](https://github.com/networknt/json-schema-validator/compare/1.0.86...1.1.0) --- updated-dependencies: - dependency-name: com.networknt:json-schema-validator dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + buildSrc/build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8b6840b62ca5c..6d2558ef5743f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -167,6 +167,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) - Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887)) - Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) +- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.1.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 3c846b48549fb..1cb21acd14af7 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -118,7 +118,7 @@ dependencies { api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6' api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}" api 'org.apache.maven:maven-model:3.9.6' - api 'com.networknt:json-schema-validator:1.0.86' + api 'com.networknt:json-schema-validator:1.1.0' api 'org.jruby.jcodings:jcodings:1.0.58' api 'org.jruby.joni:joni:2.2.1' api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}" From c132db950548ee99127bc92be8d0f388cc0f4a51 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Mon, 15 Jan 2024 13:01:29 -0500 Subject: [PATCH 091/178] Bump OpenTelemetry from 1.32.0 to 1.34.1 (#11891) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- CHANGELOG.md | 1 + buildSrc/version.properties | 2 +- .../telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 | 1 - .../telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-context-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-context-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 | 1 + .../opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 | 1 - .../opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 | 1 + .../opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 | 1 - .../opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 | 1 + .../telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 | 1 - .../telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 | 1 
- .../licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 | 1 + .../licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 | 1 - .../licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 | 1 + 28 files changed, 15 insertions(+), 14 deletions(-) delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 delete mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 create mode 100644 plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d2558ef5743f..26d324839ae54 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -126,6 +126,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) - Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) - Introduce new 
feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) +- Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 3813750507f18..dd7f2e1eaabf0 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -70,5 +70,5 @@ jzlib = 1.1.3 resteasy = 6.2.4.Final # opentelemetry dependencies -opentelemetry = 1.32.0 +opentelemetry = 1.34.1 opentelemetrysemconv = 1.23.1-alpha diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 deleted file mode 100644 index 2c038aad4b934..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-api-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a5c081d8f877225732efe13908f350029c811709 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..19f734ca17b79 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-api-1.34.1.jar.sha1 @@ -0,0 +1 @@ +b4aea155f6d6b1032eba85378564431cfd86f562 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 deleted file mode 100644 index 3243f524432eb..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-context-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5f8bb68084ea5709a27e935907b1bb49d0bd049 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..4c06d28cba199 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-context-1.34.1.jar.sha1 @@ -0,0 +1 @@ +3fcc87f3d810ce49d865ee54b40831559c5e129b \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 deleted file mode 100644 index 1d7da47286ae0..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3643061da474061ffa7f2036a58a7a0d40212276 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..91a5c0f715d2b --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +19c9a3f52851a1333b648ed83c82d16eb4c64afd \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 deleted file mode 100644 index 3fab0e47adcbe..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ab56c7223112fac13a66e3f667c5fc666f4a3707 \ No newline at end of file diff --git 
a/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..6c05600ae3b08 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-logging-1.34.1.jar.sha1 @@ -0,0 +1 @@ +b3e74d5b8cf5e60d9965042fa284085bbe081ce3 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 deleted file mode 100644 index f93cf7a63bfad..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5752d171cd08ac84f9273258a315bc5f97e1187e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..f54e6f6893050 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-1.34.1.jar.sha1 @@ -0,0 +1 @@ +af68f90f0410b7b3a1900d3e0a15ad51b10ffd5b \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 deleted file mode 100644 index 2fc33b62aee54..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6b41cd66a385d513b58b6617f20b701435b64abd \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..49d40b36ba85b --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-otlp-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +4acab18052267e280d1f9de22c591a5c88bed3a6 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 deleted file mode 100644 index 99f758b047aa2..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9346006cead763247a786b5cabf3e1ae3c88eadb \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..a01de2aa84c43 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-exporter-sender-okhttp-1.34.1.jar.sha1 @@ -0,0 +1 @@ +9f07e1764389e076a36fb7d9e5769e29f3dab950 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 deleted file mode 100644 index 705a342a684c4..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.32.0-alpha.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fab56e187e3fb3c70c18223184d53a76500114ab \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 
b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 new file mode 100644 index 0000000000000..a5fc8c2059104 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-extension-incubator-1.34.1-alpha.jar.sha1 @@ -0,0 +1 @@ +9201e6a43a0a89515626f7516c7d1b2c349f76df \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 deleted file mode 100644 index 31818695cc774..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -504de8cc7dc68e84c8c7c2757522d934e9c50d35 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..cd746f0756e46 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-1.34.1.jar.sha1 @@ -0,0 +1 @@ +ab49eb621d6d01f0ad2f016989d0352ef18ea9a2 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 deleted file mode 100644 index 3cf3080a98bd9..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -454c7a6afab864de9f0c166246f28f16aaa824c1 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..740737dc13efc --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-common-1.34.1.jar.sha1 @@ -0,0 +1 @@ +01fcd8bad38d7b8987f6fc93bd7e933240eb727e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 deleted file mode 100644 index 41b0dca07556e..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b054760243906af0a327a8f5bd99adc2826ccd88 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..e6ff3dbafda22 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-logs-1.34.1.jar.sha1 @@ -0,0 +1 @@ +abad9abc80dfe6118a60413afa161696bbf8dd43 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 deleted file mode 100644 index 2f71fd5cc780a..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bff24f085193e105d4e23e3db27bf81ccb3d830e \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..36ec960c4f7be --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-metrics-1.34.1.jar.sha1 @@ -0,0 +1 @@ +d88407ae475e5f4e859a81e4f61e362e939f7bc2 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 
b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 deleted file mode 100644 index f0060b8a0f78f..0000000000000 --- a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.32.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d80ad3210fa890a856a1d04379d134ab44a09501 \ No newline at end of file diff --git a/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 new file mode 100644 index 0000000000000..293b82f206c99 --- /dev/null +++ b/plugins/telemetry-otel/licenses/opentelemetry-sdk-trace-1.34.1.jar.sha1 @@ -0,0 +1 @@ +121a75c2ba9ed8b80f5ff131c2411a5d460f38d0 \ No newline at end of file From 4c283a7c79b20e73ccf3378124893451197b85a3 Mon Sep 17 00:00:00 2001 From: Fahad Shami <48306098+fahadshamiinsta@users.noreply.github.com> Date: Wed, 17 Jan 2024 09:32:55 +1100 Subject: [PATCH 092/178] Support for Google Application Default Credentials (#8394) * fixed conflicts Signed-off-by: fahadshamiinsta <fshami@netapp.com> * applying spotless Java check Signed-off-by: fahadshamiinsta <fshami@netapp.com> * added a comment to test helper method Signed-off-by: fahadshamiinsta <fshami@netapp.com> * added a comment to GoogleApplicationDefaultCredentials class with document reference Signed-off-by: fahadshamiinsta <fshami@netapp.com> * to rerun gradle checks Signed-off-by: fahadshamiinsta <fshami@netapp.com> * increasing coverage by adding another test Signed-off-by: fahadshamiinsta <fshami@netapp.com> * test name change Signed-off-by: fahadshamiinsta <fshami@netapp.com> * rerun ci Signed-off-by: fahadshamiinsta <fshami@netapp.com> * rerun ci Signed-off-by: fahadshamiinsta <fshami@netapp.com> * force push to rerun ci Signed-off-by: fahadshamiinsta <fshami@netapp.com> * pushing to trigger ci checks Signed-off-by: fahadshamiinsta <fshami@netapp.com> --------- Signed-off-by: fahadshamiinsta <fshami@netapp.com> --- CHANGELOG.md | 1 + .../GoogleApplicationDefaultCredentials.java | 33 +++++ .../gcs/GoogleCloudStorageService.java | 20 ++- .../gcs/GoogleCloudStorageServiceTests.java | 132 ++++++++++++++---- 4 files changed, 157 insertions(+), 29 deletions(-) create mode 100644 plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 26d324839ae54..14055f0d8462f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -58,6 +58,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add task completion count in search backpressure stats API ([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/)) - Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894)) - Switched to more reliable OpenSearch Lucene snapshot location([#11728](https://github.com/opensearch-project/OpenSearch/pull/11728)) +- Added support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394)) ### Deprecated diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java new file mode 100644 index 0000000000000..5002ab9a2e704 --- /dev/null +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleApplicationDefaultCredentials.java @@ -0,0 
+1,33 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.repositories.gcs; + +import com.google.auth.oauth2.GoogleCredentials; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; + +import java.io.IOException; + +/** + * This class facilitates fetching Application Default Credentials. + * See <a href="https://cloud.google.com/docs/authentication/application-default-credentials">How Application Default Credentials works</a> + */ +public class GoogleApplicationDefaultCredentials { + private static final Logger logger = LogManager.getLogger(GoogleApplicationDefaultCredentials.class); + + public GoogleCredentials get() { + GoogleCredentials credentials = null; + try { + credentials = SocketAccess.doPrivilegedIOException(GoogleCredentials::getApplicationDefault); + } catch (IOException e) { + logger.error("Failed to retrieve \"Application Default Credentials\"", e); + } + return credentials; + } +} diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java index c9ebb3acaf3e5..83a4146c99b99 100644 --- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageService.java @@ -36,6 +36,7 @@ import com.google.api.client.http.HttpRequestInitializer; import com.google.api.client.http.HttpTransport; import com.google.api.client.http.javanet.NetHttpTransport; +import com.google.auth.oauth2.GoogleCredentials; import com.google.auth.oauth2.ServiceAccountCredentials; import com.google.cloud.ServiceOptions; import com.google.cloud.http.HttpTransportOptions; @@ -70,6 +71,16 @@ public class GoogleCloudStorageService { */ private volatile Map<String, Storage> clientCache = emptyMap(); + final private GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials; + + public GoogleCloudStorageService() { + this.googleApplicationDefaultCredentials = new GoogleApplicationDefaultCredentials(); + } + + public GoogleCloudStorageService(GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials) { + this.googleApplicationDefaultCredentials = googleApplicationDefaultCredentials; + } + /** * Refreshes the client settings and clears the client cache. Subsequent calls to * {@code GoogleCloudStorageService#client} will return new clients constructed @@ -213,10 +224,11 @@ StorageOptions createStorageOptions( storageOptionsBuilder.setProjectId(clientSettings.getProjectId()); } if (clientSettings.getCredential() == null) { - logger.warn( - "\"Application Default Credentials\" are not supported out of the box." + " Additional file system permissions have to be granted to the plugin."
- ); + logger.info("\"Application Default Credentials\" will be in use"); + final GoogleCredentials credentials = googleApplicationDefaultCredentials.get(); + if (credentials != null) { + storageOptionsBuilder.setCredentials(credentials); + } } else { ServiceAccountCredentials serviceAccountCredentials = clientSettings.getCredential(); // override token server URI diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java index a531555debefb..58e412684ed5a 100644 --- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -33,8 +33,10 @@ package org.opensearch.repositories.gcs; import com.google.auth.Credentials; +import com.google.auth.oauth2.GoogleCredentials; import com.google.cloud.http.HttpTransportOptions; import com.google.cloud.storage.Storage; +import com.google.cloud.storage.StorageOptions; import org.opensearch.common.settings.MockSecureSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; @@ -42,30 +44,38 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.OpenSearchTestCase; +import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; +import java.io.IOException; +import java.net.Proxy; +import java.net.URI; +import java.net.URISyntaxException; import java.security.KeyPair; import java.security.KeyPairGenerator; import java.util.Base64; import java.util.Locale; import java.util.UUID; +import org.mockito.Mockito; + import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class GoogleCloudStorageServiceTests extends OpenSearchTestCase { + final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); + final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String endpoint = randomFrom("http://", "https://") + + randomFrom("www.opensearch.org", "www.googleapis.com", "localhost/api", "google.com/oauth") + + ":" + + randomIntBetween(1, 65535); + final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + public void testClientInitializer() throws Exception { final String clientName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); - final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); - final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - final String endpoint = randomFrom("http://", "https://") - + randomFrom("www.opensearch.org", "www.googleapis.com", "localhost/api", "google.com/oauth") - + ":" - + randomIntBetween(1, 65535); - final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); final Settings settings = Settings.builder() .put( GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), @@ 
-82,31 +92,35 @@ public void testClientInitializer() throws Exception { .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint) .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) .build(); - final GoogleCloudStorageService service = new GoogleCloudStorageService(); + GoogleCredentials mockGoogleCredentials = Mockito.mock(GoogleCredentials.class); + GoogleApplicationDefaultCredentials mockDefaultCredentials = Mockito.mock(GoogleApplicationDefaultCredentials.class); + Mockito.when(mockDefaultCredentials.get()).thenReturn(mockGoogleCredentials); + + final GoogleCloudStorageService service = new GoogleCloudStorageService(mockDefaultCredentials); service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(settings)); GoogleCloudStorageOperationsStats statsCollector = new GoogleCloudStorageOperationsStats("bucket"); final IllegalArgumentException e = expectThrows( IllegalArgumentException.class, () -> service.client("another_client", "repo", statsCollector) ); - assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); + MatcherAssert.assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); assertSettingDeprecationsAndWarnings( new Setting<?>[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) } ); final Storage storage = service.client(clientName, "repo", statsCollector); - assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); - assertThat(storage.getOptions().getHost(), Matchers.is(endpoint)); - assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); - assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); - assertThat( + MatcherAssert.assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); + MatcherAssert.assertThat(storage.getOptions().getHost(), Matchers.is(endpoint)); + MatcherAssert.assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); + MatcherAssert.assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); + MatcherAssert.assertThat( ((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(), Matchers.is((int) connectTimeValue.millis()) ); - assertThat( + MatcherAssert.assertThat( ((HttpTransportOptions) storage.getOptions().getTransportOptions()).getReadTimeout(), Matchers.is((int) readTimeValue.millis()) ); - assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class)); + MatcherAssert.assertThat(storage.getOptions().getCredentials(), Matchers.instanceOf(Credentials.class)); } public void testReinitClientSettings() throws Exception { @@ -122,33 +136,33 @@ public void testReinitClientSettings() throws Exception { final GoogleCloudStorageService storageService = plugin.storageService; GoogleCloudStorageOperationsStats statsCollector = new GoogleCloudStorageOperationsStats("bucket"); final Storage client11 = storageService.client("gcs1", "repo1", statsCollector); - assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + MatcherAssert.assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); final Storage client12 = storageService.client("gcs2", "repo2", statsCollector); - assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + 
MatcherAssert.assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); // client 3 is missing final IllegalArgumentException e1 = expectThrows( IllegalArgumentException.class, () -> storageService.client("gcs3", "repo3", statsCollector) ); - assertThat(e1.getMessage(), containsString("Unknown client name [gcs3].")); + MatcherAssert.assertThat(e1.getMessage(), containsString("Unknown client name [gcs3].")); // update client settings plugin.reload(settings2); // old client 1 not changed - assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + MatcherAssert.assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); // new client 1 is changed final Storage client21 = storageService.client("gcs1", "repo1", statsCollector); - assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21")); + MatcherAssert.assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21")); // old client 2 not changed - assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + MatcherAssert.assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); // new client2 is gone final IllegalArgumentException e2 = expectThrows( IllegalArgumentException.class, () -> storageService.client("gcs2", "repo2", statsCollector) ); - assertThat(e2.getMessage(), containsString("Unknown client name [gcs2].")); + MatcherAssert.assertThat(e2.getMessage(), containsString("Unknown client name [gcs2].")); // client 3 emerged final Storage client23 = storageService.client("gcs3", "repo3", statsCollector); - assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23")); + MatcherAssert.assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23")); } } @@ -193,4 +207,72 @@ public void testToTimeout() { assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue()); assertEquals(0, GoogleCloudStorageService.toTimeout(TimeValue.MINUS_ONE).intValue()); } + + /** + * The following method tests the Google Application Default Credentials instead of + * using a service account file. + * Mocking is used here because GoogleCredentials.getApplicationDefault is a static method, + * and to avoid setting environment variables which would later fall back to GCE.
+ * @throws Exception + */ + public void testApplicationDefaultCredential() throws Exception { + GoogleCloudStorageClientSettings settings = getGCSClientSettingsWithoutCredentials(); + GoogleCredentials mockGoogleCredentials = Mockito.mock(GoogleCredentials.class); + HttpTransportOptions mockHttpTransportOptions = Mockito.mock(HttpTransportOptions.class); + GoogleApplicationDefaultCredentials mockDefaultCredentials = Mockito.mock(GoogleApplicationDefaultCredentials.class); + Mockito.when(mockDefaultCredentials.get()).thenReturn(mockGoogleCredentials); + + GoogleCloudStorageService service = new GoogleCloudStorageService(mockDefaultCredentials); + StorageOptions storageOptions = service.createStorageOptions(settings, mockHttpTransportOptions); + assertNotNull(storageOptions); + assertEquals(storageOptions.getCredentials().toString(), mockGoogleCredentials.toString()); + } + + /** + * The application default credential throws an exception when there are + * no environment variables provided or Google Compute Engine is not running + * @throws Exception + */ + public void testApplicationDefaultCredentialsWhenNoSettingProvided() throws Exception { + GoogleCloudStorageClientSettings settings = getGCSClientSettingsWithoutCredentials(); + HttpTransportOptions mockHttpTransportOptions = Mockito.mock(HttpTransportOptions.class); + GoogleCloudStorageService service = new GoogleCloudStorageService(); + StorageOptions storageOptions = service.createStorageOptions(settings, mockHttpTransportOptions); + + Exception exception = assertThrows(IOException.class, GoogleCredentials::getApplicationDefault); + assertNotNull(storageOptions); + assertNull(storageOptions.getCredentials()); + MatcherAssert.assertThat(exception.getMessage(), containsString("The Application Default Credentials are not available")); + } + + /** + * The application default credential throws an IOException when it is + * used without GoogleCloudStorageService + */ + public void testDefaultCredentialsThrowsExceptionWithoutGCStorageService() { + GoogleApplicationDefaultCredentials googleApplicationDefaultCredentials = new GoogleApplicationDefaultCredentials(); + GoogleCredentials credentials = googleApplicationDefaultCredentials.get(); + assertNull(credentials); + Exception exception = assertThrows(IOException.class, GoogleCredentials::getApplicationDefault); + MatcherAssert.assertThat(exception.getMessage(), containsString("The Application Default Credentials are not available")); + } + + /** + * This is a helper method to provide GCS Client settings without credentials + * @return GoogleCloudStorageClientSettings + * @throws URISyntaxException + */ + private GoogleCloudStorageClientSettings getGCSClientSettingsWithoutCredentials() throws URISyntaxException { + return new GoogleCloudStorageClientSettings( + null, + endpoint, + projectIdName, + connectTimeValue, + readTimeValue, + applicationName, + new URI(""), + new ProxySettings(Proxy.Type.DIRECT, null, 0, null, null) + ); + } + } From 1d8bbd5d643e483a0089243d5c7d0f7e28ac8d16 Mon Sep 17 00:00:00 2001 From: Peter Nied <petern@amazon.com> Date: Tue, 16 Jan 2024 16:55:49 -0600 Subject: [PATCH 093/178] Add TRIAGING.md for triage agenda and processes (#11814) Signed-off-by: Peter Nied <petern@amazon.com> Signed-off-by: Peter Nied <peternied@hotmail.com> --- TRIAGING.md | 83 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 TRIAGING.md diff --git a/TRIAGING.md b/TRIAGING.md new file mode 100644 index 0000000000000..47cb44a4f5ba2 --- /dev/null
+++ b/TRIAGING.md @@ -0,0 +1,83 @@ +<img src="https://opensearch.org/assets/img/opensearch-logo-themed.svg" height="64px"> + +The maintainers of the OpenSearch Repo seek to promote an inclusive and engaged community of contributors. In order to facilitate this, weekly triage meetings are open to all and attendance is encouraged for anyone who hopes to contribute, discuss an issue, or learn more about the project. To learn more about contributing to the OpenSearch Repo, visit the [Contributing](./CONTRIBUTING.md) documentation. + +### Do I need to attend for my issue to be addressed/triaged? + +Attendance is not required for your issue to be triaged or addressed. If not accepted, the issue will be updated with a comment for next steps. All new issues are triaged weekly. + +You can track if your issue was triaged by watching your GitHub notifications for updates. + +### What happens if my issue does not get covered this time? + +Each meeting we seek to address all new issues. However, should we run out of time before your issue is discussed, you are always welcome to attend the next meeting or to follow up on the issue post itself. + +### How do I join the Triage meeting? + +Meetings are hosted regularly at 10:00a - 10:55a Central Time every Wednesday and can be joined via [Chime](https://aws.amazon.com/chime/), with this [meeting link](https://chime.aws/1988437365). + +After joining the Chime meeting, you can enable your video / voice to join the discussion. If you do not have a webcam or microphone available, you can still join in via the text chat. + +If you have an issue you'd like to bring forth, please prepare a link to the issue so it can be presented and viewed by everyone in the meeting. + +### Is there an agenda for each week? + +Yes, there is a set agenda for each meeting. + +Each 55-minute meeting follows this structure: +1. **Initial Gathering:** Feel free to turn on your video and engage in informal conversation. Shortly, a volunteer triage [facilitator](#what-is-the-role-of-the-facilitator) will begin the meeting and share their screen. +2. **Record Attendees:** The facilitator will request attendees to share their GitHub profile links. These links will be collected and assembled into a [tag](#how-do-triage-facilitators-tag-comments-during-the-triage-meeting) to annotate comments during the meeting. +3. **Announcements:** Any announcements will be made at the beginning of the meeting. +4. **Review of New Issues:** We start by reviewing all untriaged [issues](https://github.com/search?q=label%3Auntriaged+is%3Aopen++repo%3Aopensearch-project%2FOpenSearch+&type=issues&ref=advsearch&s=created&o=desc) for the OpenSearch repo. +5. **Attendee Requests:** An opportunity for any meeting member to request consideration of an issue or pull request. +6. **Open Discussion:** Attendees can bring up any topics not already covered by filed issues or pull requests. + +### What is the role of the facilitator? + +The facilitator is crucial in driving the meeting, ensuring a smooth flow of issues into OpenSearch for future contributions. They maintain the meeting's agenda, solicit input from attendees, and record outcomes using the triage tag as items are discussed. + +### Do I need to have already contributed to the project to attend a triage meeting? + +No prior contributions are required. All interested individuals are welcome and encouraged to attend. Triage meetings offer a fantastic opportunity for new contributors to understand the project and explore various contribution avenues.
+ +### What if I have an issue that is almost a duplicate? Should I open a new one to be triaged? + +You can always open an [issue](https://github.com/opensearch-project/OpenSearch/issues/new/choose), including one that you think may be a duplicate. If you believe your issue is similar but distinct from an existing one, you are encouraged to file it and explain the differences during the triage meeting. + +### What if I have follow-up questions on an issue? + +If you have an existing issue you would like to discuss, you can always comment on the issue itself. Alternatively, you are welcome to come to the triage meeting to discuss it. + +### Is this meeting a good place to get help setting up features on my OpenSearch instance? + +While we are always happy to help the community, the best resource for implementation questions is [the OpenSearch forum](https://forum.opensearch.org/). + +There you can find answers to many common questions as well as speak with implementation experts. + +### What are the issue labels associated with triaging? + +There are several labels used to identify the 'state' of issues filed in OpenSearch: +| Label | When Applied | Meaning | +|---------------|----------------------|-----------------------------------------------------------------------------------------------------------------------------------------| +| `Untriaged` | When issues are created or re-opened. | Issues labeled as 'Untriaged' require the attention of the repository maintainers and may need to be prioritized for quicker resolution. It's crucial to keep the count of 'Untriaged' labels low to ensure all potential security issues are addressed in a timely manner. See [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) for more details on handling these issues. | +| `Help Wanted` | Anytime. | Issues marked as 'Help Wanted' signal that they are actionable and not the current focus of the project maintainers. Community contributions are especially encouraged for these issues. | +| `Good First Issue` | Anytime. | Issues labeled as 'Good First Issue' are small in scope and can be resolved with a single pull request. These are recommended starting points for newcomers looking to make their first contributions. | + +### What are the typical outcomes of a triaged issue? + +| Outcome | Label | Description | Canned Response | +|--------------|------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| Accepted | `-untriaged` | The issue has the details needed to be directed towards area owners. | "Thanks for filing this issue, please feel free to submit a pull request." | +| Rejected | N/A | The issue will be closed with a reason for why it was rejected. Reasons might include lack of details, or being outside the scope of the project. | "Thanks for creating this issue; however, it isn't being accepted due to {REASON}. Please feel free to re-open after addressing the reason." | +| Area Triage | `+{AREALABEL}` | OpenSearch has many different areas. If it's unclear whether an issue should be accepted, it will be labeled with the area and an owner will be @mentioned for follow-up.
| "Thanks for creating this issue; the triage meeting was unsure if this issue should be accepted, @{PERSON} or someone from the area please review and then accept or reject this issue?" | +| Transfer | N/A | If the issue applies to another repository within the OpenSearch Project, it will be transferred accordingly. | "@opensearch-project/triage, can you please transfer this issue to project {REPOSITORY}." Or, if someone at the meeting has permissions, they can start the transfer. | + +### Is this where I should bring up potential security vulnerabilities? + +Due to the sensitive nature of security vulnerabilities, please report all potential vulnerabilities directly by following the steps outlined on the [SECURITY.md](https://github.com/opensearch-project/OpenSearch/blob/main/SECURITY.md) document. + +### How do triage facilitator tag comments during the triage meeting? + +During the triage meeting, facilitators should use the tag _[Triage - attendees [1](#Profile_link) [2](#Profile_link)]_ to indicate a collective decision. This ensures contributors know the decision came from the meeting rather than an individual and identifies participants for any follow-up queries. + +This tag should not be used outside triage meetings. From 6d2d4ddc8b096e95b94b65142ed98437d96d8d29 Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Wed, 17 Jan 2024 07:23:05 +0800 Subject: [PATCH 094/178] Add copy ingest processor (#11870) --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 1 + .../ingest/common/CopyProcessor.java | 161 ++++++++ .../common/IngestCommonModulePlugin.java | 1 + .../common/CopyProcessorFactoryTests.java | 101 +++++ .../ingest/common/CopyProcessorTests.java | 125 ++++++ .../rest-api-spec/test/ingest/10_basic.yml | 17 + .../test/ingest/300_copy_processor.yml | 374 ++++++++++++++++++ .../org/opensearch/ingest/IngestDocument.java | 2 +- 8 files changed, 781 insertions(+), 1 deletion(-) create mode 100644 modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java create mode 100644 modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java create mode 100644 modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java create mode 100644 modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 14055f0d8462f..d1e970d097bfb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -126,6 +126,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add additional handling in SearchTemplateRequest when simulate is set to true ([#11591](https://github.com/opensearch-project/OpenSearch/pull/11591)) - Introduce cluster level setting `cluster.index.restrict.replication.type` to prevent replication type setting override during index creations([#11583](https://github.com/opensearch-project/OpenSearch/pull/11583)) - Add match_only_text field that is optimized for storage by trading off positional queries performance ([#6836](https://github.com/opensearch-project/OpenSearch/pull/11039)) +- Add copy ingest processor ([#11870](https://github.com/opensearch-project/OpenSearch/pull/11870)) - Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) - Bump OpenTelemetry from 1.32.0 to 1.34.1 
([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java new file mode 100644 index 0000000000000..dec69df275130 --- /dev/null +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/CopyProcessor.java @@ -0,0 +1,161 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.ingest.common; + +import org.opensearch.core.common.Strings; +import org.opensearch.ingest.AbstractProcessor; +import org.opensearch.ingest.ConfigurationUtils; +import org.opensearch.ingest.IngestDocument; +import org.opensearch.ingest.Processor; +import org.opensearch.script.ScriptService; +import org.opensearch.script.TemplateScript; + +import java.util.Map; + +public final class CopyProcessor extends AbstractProcessor { + public static final String TYPE = "copy"; + + private final TemplateScript.Factory sourceField; + private final TemplateScript.Factory targetField; + + private final boolean ignoreMissing; + + private final boolean removeSource; + + private final boolean overrideTarget; + + CopyProcessor(String tag, String description, TemplateScript.Factory sourceField, TemplateScript.Factory targetField) { + this(tag, description, sourceField, targetField, false, false, false); + } + + CopyProcessor( + String tag, + String description, + TemplateScript.Factory sourceField, + TemplateScript.Factory targetField, + boolean ignoreMissing, + boolean removeSource, + boolean overrideTarget + ) { + super(tag, description); + this.sourceField = sourceField; + this.targetField = targetField; + this.ignoreMissing = ignoreMissing; + this.removeSource = removeSource; + this.overrideTarget = overrideTarget; + } + + public TemplateScript.Factory getSourceField() { + return sourceField; + } + + public TemplateScript.Factory getTargetField() { + return targetField; + } + + public boolean isIgnoreMissing() { + return ignoreMissing; + } + + public boolean isRemoveSource() { + return removeSource; + } + + public boolean isOverrideTarget() { + return overrideTarget; + } + + @Override + public IngestDocument execute(IngestDocument document) { + String source = document.renderTemplate(sourceField); + final boolean sourceFieldPathIsNullOrEmpty = Strings.isNullOrEmpty(source); + if (sourceFieldPathIsNullOrEmpty || document.hasField(source, true) == false) { + if (ignoreMissing) { + return document; + } else if (sourceFieldPathIsNullOrEmpty) { + throw new IllegalArgumentException("source field path cannot be null nor empty"); + } else { + throw new IllegalArgumentException("source field [" + source + "] doesn't exist"); + } + } + + String target = document.renderTemplate(targetField); + if (Strings.isNullOrEmpty(target)) { + throw new IllegalArgumentException("target field path cannot be null nor empty"); + } + if (source.equals(target)) { + throw new IllegalArgumentException("source field path and target field path cannot be same"); + } + + if (overrideTarget || document.hasField(target, true) == false || document.getFieldValue(target, Object.class) == null) { + Object sourceValue = document.getFieldValue(source, Object.class); + document.setFieldValue(target, IngestDocument.deepCopy(sourceValue)); + } else { + throw new 
IllegalArgumentException("target field [" + target + "] already exists"); + } + + if (removeSource) { + document.removeField(source); + } + + return document; + } + + @Override + public String getType() { + return TYPE; + } + + public static final class Factory implements Processor.Factory { + + private final ScriptService scriptService; + + public Factory(ScriptService scriptService) { + this.scriptService = scriptService; + } + + @Override + public CopyProcessor create( + Map<String, Processor.Factory> registry, + String processorTag, + String description, + Map<String, Object> config + ) throws Exception { + String sourceField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "source_field"); + TemplateScript.Factory sourceFieldTemplate = ConfigurationUtils.compileTemplate( + TYPE, + processorTag, + "source_field", + sourceField, + scriptService + ); + String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field"); + TemplateScript.Factory targetFieldTemplate = ConfigurationUtils.compileTemplate( + TYPE, + processorTag, + "target_field", + targetField, + scriptService + ); + boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); + boolean removeSource = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "remove_source", false); + boolean overrideTarget = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "override_target", false); + + return new CopyProcessor( + processorTag, + description, + sourceFieldTemplate, + targetFieldTemplate, + ignoreMissing, + removeSource, + overrideTarget + ); + } + } +} diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java index a2a51d968e078..7c1b4841122b0 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java @@ -106,6 +106,7 @@ public Map<String, Processor.Factory> getProcessors(Processor.Parameters paramet processors.put(DropProcessor.TYPE, new DropProcessor.Factory()); processors.put(HtmlStripProcessor.TYPE, new HtmlStripProcessor.Factory()); processors.put(CsvProcessor.TYPE, new CsvProcessor.Factory()); + processors.put(CopyProcessor.TYPE, new CopyProcessor.Factory(parameters.scriptService)); return Collections.unmodifiableMap(processors); } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java new file mode 100644 index 0000000000000..c1ca86a49e334 --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorFactoryTests.java @@ -0,0 +1,101 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.ingest.common; + +import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchParseException; +import org.opensearch.ingest.TestTemplateService; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class CopyProcessorFactoryTests extends OpenSearchTestCase { + + private CopyProcessor.Factory factory; + + @Before + public void init() { + factory = new CopyProcessor.Factory(TestTemplateService.instance()); + } + + public void testCreate() throws Exception { + boolean ignoreMissing = randomBoolean(); + boolean removeSource = randomBoolean(); + boolean overrideTarget = randomBoolean(); + Map<String, Object> config = new HashMap<>(); + config.put("source_field", "source"); + config.put("target_field", "target"); + config.put("ignore_missing", ignoreMissing); + config.put("remove_source", removeSource); + config.put("override_target", overrideTarget); + String processorTag = randomAlphaOfLength(10); + CopyProcessor copyProcessor = factory.create(null, processorTag, null, config); + assertThat(copyProcessor.getTag(), equalTo(processorTag)); + assertThat(copyProcessor.getSourceField().newInstance(Collections.emptyMap()).execute(), equalTo("source")); + assertThat(copyProcessor.getTargetField().newInstance(Collections.emptyMap()).execute(), equalTo("target")); + assertThat(copyProcessor.isIgnoreMissing(), equalTo(ignoreMissing)); + assertThat(copyProcessor.isRemoveSource(), equalTo(removeSource)); + assertThat(copyProcessor.isOverrideTarget(), equalTo(overrideTarget)); + } + + public void testCreateWithSourceField() throws Exception { + Map<String, Object> config = new HashMap<>(); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[source_field] required property is missing")); + } + + config.put("source_field", null); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[source_field] required property is missing")); + } + } + + public void testCreateWithTargetField() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("source_field", "source"); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); + } + + config.put("source_field", "source"); + config.put("target_field", null); + try { + factory.create(null, null, null, config); + fail("factory create should have failed"); + } catch (OpenSearchParseException e) { + assertThat(e.getMessage(), equalTo("[target_field] required property is missing")); + } + } + + public void testInvalidMustacheTemplate() throws Exception { + CopyProcessor.Factory factory = new CopyProcessor.Factory(TestTemplateService.instance(true)); + Map<String, Object> config = new HashMap<>(); + config.put("source_field", "{{source}}"); + config.put("target_field", "target"); + String processorTag = randomAlphaOfLength(10); + OpenSearchException exception = expectThrows(OpenSearchException.class, () -> factory.create(null, processorTag, null, config)); + assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could 
not compile script")); + assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag)); + } + +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java new file mode 100644 index 0000000000000..f271bdd342d0b --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java @@ -0,0 +1,125 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.ingest.common; + +import org.opensearch.ingest.IngestDocument; +import org.opensearch.ingest.Processor; +import org.opensearch.ingest.RandomDocumentPicks; +import org.opensearch.ingest.TestTemplateService; +import org.opensearch.test.OpenSearchTestCase; + +import static org.hamcrest.Matchers.equalTo; + +public class CopyProcessorTests extends OpenSearchTestCase { + + public void testCopyExistingField() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String sourceFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + String targetFieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, false, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + Object sourceValue = ingestDocument.getFieldValue(sourceFieldName, Object.class); + assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue)); + assertThat(ingestDocument.getFieldValue(sourceFieldName, Object.class), equalTo(sourceValue)); + + Processor processorWithEmptyTarget = createCopyProcessor(sourceFieldName, "", false, false, false); + assertThrows( + "target field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithEmptyTarget.execute(ingestDocument) + ); + + Processor processorWithSameSourceAndTarget = createCopyProcessor(sourceFieldName, sourceFieldName, false, false, false); + assertThrows( + "source field path and target field path cannot be same", + IllegalArgumentException.class, + () -> processorWithSameSourceAndTarget.execute(ingestDocument) + ); + } + + public void testCopyWithIgnoreMissing() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String targetFieldName = RandomDocumentPicks.randomFieldName(random()); + Processor processor = createCopyProcessor("non-existing-field", targetFieldName, false, false, false); + assertThrows( + "source field [non-existing-field] doesn't exist", + IllegalArgumentException.class, + () -> processor.execute(ingestDocument) + ); + + Processor processorWithEmptyFieldName = createCopyProcessor("", targetFieldName, false, false, false); + assertThrows( + "source field path cannot be null nor empty", + IllegalArgumentException.class, + () -> processorWithEmptyFieldName.execute(ingestDocument) + ); + + Processor processorWithIgnoreMissing = createCopyProcessor("non-existing-field", targetFieldName, true, false, false); + processorWithIgnoreMissing.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(false)); + } + + public void testCopyWithRemoveSource() throws Exception { + 
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String sourceFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + String targetFieldName = RandomDocumentPicks.randomFieldName(random()); + Object sourceValue = ingestDocument.getFieldValue(sourceFieldName, Object.class); + + Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, true, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue)); + assertThat(ingestDocument.hasField(sourceFieldName), equalTo(false)); + } + + public void testCopyToExistingField() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + String targetFieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); + Object sourceValue = RandomDocumentPicks.randomFieldValue(random()); + String sourceFieldName = RandomDocumentPicks.addRandomField(random(), ingestDocument, sourceValue); + + Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, false, false); + assertThrows( + "target field [" + targetFieldName + "] already exists", + IllegalArgumentException.class, + () -> processor.execute(ingestDocument) + ); + + // if override_target is false but target field's value is null, copy can execute successfully + String targetFieldWithNullValue = RandomDocumentPicks.addRandomField(random(), ingestDocument, null); + Processor processorWithTargetNullValue = createCopyProcessor(sourceFieldName, targetFieldWithNullValue, false, false, false); + processorWithTargetNullValue.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldWithNullValue), equalTo(true)); + assertThat(ingestDocument.getFieldValue(targetFieldWithNullValue, Object.class), equalTo(sourceValue)); + + Processor processorWithOverrideTargetIsTrue = createCopyProcessor(sourceFieldName, targetFieldName, false, false, true); + processorWithOverrideTargetIsTrue.execute(ingestDocument); + assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); + assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue)); + } + + private static Processor createCopyProcessor( + String sourceFieldName, + String targetFieldName, + boolean ignoreMissing, + boolean removeSource, + boolean overrideTarget + ) { + return new CopyProcessor( + randomAlphaOfLength(10), + null, + new TestTemplateService.MockTemplateScript.Factory(sourceFieldName), + new TestTemplateService.MockTemplateScript.Factory(targetFieldName), + ignoreMissing, + removeSource, + overrideTarget + ); + } +} diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml index f44cc1f9f9fcf..0719082c887f2 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml @@ -36,3 +36,20 @@ - contains: { nodes.$cluster_manager.ingest.processors: { type: split } } - contains: { nodes.$cluster_manager.ingest.processors: { type: trim } } - contains: { nodes.$cluster_manager.ingest.processors: { type: uppercase } } + +--- +"Copy processor exists": + - skip: + version: " - 2.11.99" + features: contains + reason: "copy 
processor was introduced in 2.12.0 and contains is a newly added assertion" + - do: + cluster.state: {} + + # Get cluster-manager node id + - set: { cluster_manager_node: cluster_manager } + + - do: + nodes.info: {} + + - contains: { nodes.$cluster_manager.ingest.processors: { type: copy } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml new file mode 100644 index 0000000000000..0203b62ba67d6 --- /dev/null +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/300_copy_processor.yml @@ -0,0 +1,374 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "1" + ignore: 404 + +--- +"Test create copy processor": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + catch: /\[target\_field\] required property is missing/ + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "source" + } + } + ] + } + - do: + catch: /\[source\_field\] required property is missing/ + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "target_field" : "target" + } + } + ] + } + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "source", + "target_field" : "target", + "ignore_missing" : true, + "remove_source" : true, + "override_target" : true + } + } + ] + } + - match: { acknowledged: true } + +--- +"Test copy processor with ignore_missing": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "unknown_field", + "target_field" : "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /source field \[unknown\_field\] doesn\'t exist/ + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "unknown_field", + "target_field" : "bar", + "ignore_missing" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + - do: + get: + index: test + id: 1 + - match: { _source: { foo: "hello" } } + +--- +"Test copy processor with remove_source": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + - do: + get: + index: test + id: 1 + - match: { _source: { foo: "hello", bar: "hello" } } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar", + "remove_source" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello" + } + - do: + get: + index: test + id: 1 + - match: { _source: { bar: "hello" } } + +--- +"Test copy processor with override_target": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar" + } + } + ] + } + - match: {
acknowledged: true } + + - do: + catch: /target field \[bar\] already exists/ + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "foo", + "target_field" : "bar", + "override_target" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + foo: "hello", + bar: "world" + } + - do: + get: + index: test + id: 1 + - match: { _source: { foo: "hello", bar: "hello" } } + +--- +"Test copy processor with template snippets": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12" + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /source field path cannot be null nor empty/ + index: + index: test + id: 1 + pipeline: "1" + body: { + target: "bar", + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /target field path cannot be null nor empty/ + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "foo", + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}" + } + } + ] + } + - match: { acknowledged: true } + + - do: + catch: /source field path and target field path cannot be same/ + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "foo", + target: "foo", + foo: "hello", + bar: "world" + } + + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors": [ + { + "copy" : { + "source_field" : "{{source}}", + "target_field" : "{{target}}", + "override_target" : true + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "1" + body: { + source: "foo", + target: "bar", + foo: "hello", + bar: "world" + } + - do: + get: + index: test + id: 1 + - match: { _source: { source: "foo", target: "bar", foo: "hello", bar: "hello" } } diff --git a/server/src/main/java/org/opensearch/ingest/IngestDocument.java b/server/src/main/java/org/opensearch/ingest/IngestDocument.java index 10e9e64db561e..d975b0014de1f 100644 --- a/server/src/main/java/org/opensearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/opensearch/ingest/IngestDocument.java @@ -757,7 +757,7 @@ public static <K, V> Map<K, V> deepCopyMap(Map<K, V> source) { return (Map<K, V>) deepCopy(source); } - private static Object deepCopy(Object value) { + public static Object deepCopy(Object value) { if (value instanceof Map) { Map<?, ?> mapValue = (Map<?, ?>) value; Map<Object, Object> copy = new HashMap<>(mapValue.size()); From 517f091dce6a268060772f4b5740b40ce9d6ee22 Mon Sep 17 00:00:00 2001 From: Ashish <ssashish@amazon.com> Date: Wed, 17 Jan 2024 10:14:43 +0530 Subject: [PATCH 095/178] Fix flaky RemoteStoreRefreshListenerTests test #11256 (#11803) Signed-off-by: Ashish Singh <ssashish@amazon.com> --- .../index/shard/RemoteStoreRefreshListenerTests.java | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java 
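
The YAML REST tests above double as usage documentation for the new processor. As a rough end-to-end illustration, the same kind of copy pipeline could be exercised from Java 11's built-in HttpClient; the localhost endpoint, pipeline id, and index name below are assumptions for a local security-disabled development cluster, not part of the change.

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CopyPipelineDemo {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String base = "http://localhost:9200"; // assumed local dev cluster

        // Register a pipeline that copies "foo" into "bar" and drops the original.
        String pipeline = "{\"processors\":[{\"copy\":{"
            + "\"source_field\":\"foo\",\"target_field\":\"bar\",\"remove_source\":true}}]}";
        send(client, "PUT", base + "/_ingest/pipeline/copy-demo", pipeline);

        // Index a document through the pipeline; _source should then hold only "bar".
        send(client, "PUT", base + "/test/_doc/1?pipeline=copy-demo", "{\"foo\":\"hello\"}");
        send(client, "GET", base + "/test/_doc/1", null);
    }

    static void send(HttpClient client, String method, String url, String body) throws Exception {
        HttpRequest.Builder b = HttpRequest.newBuilder(URI.create(url)).header("Content-Type", "application/json");
        b = (body == null) ? b.GET() : b.method(method, HttpRequest.BodyPublishers.ofString(body));
        HttpResponse<String> resp = client.send(b.build(), HttpResponse.BodyHandlers.ofString());
        System.out.println(method + " " + url + " -> " + resp.statusCode() + " " + resp.body());
    }
}
```
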
b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java index 811d6a722d0f6..555284e55a0fa 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java @@ -418,7 +418,7 @@ public void testTrackerData() throws Exception { RemoteStoreRefreshListener listener = tuple.v1(); RemoteStoreStatsTrackerFactory trackerFactory = tuple.v2(); RemoteSegmentTransferTracker tracker = trackerFactory.getRemoteSegmentTransferTracker(indexShard.shardId()); - assertNoLag(tracker); + assertBusy(() -> assertNoLag(tracker)); indexDocs(100, randomIntBetween(100, 200)); indexShard.refresh("test"); listener.afterRefresh(true); @@ -473,6 +473,14 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn new InternalEngineFactory() ); + RemoteSegmentTransferTracker tracker = indexShard.getRemoteStoreStatsTrackerFactory() + .getRemoteSegmentTransferTracker(indexShard.shardId()); + try { + assertBusy(() -> assertTrue(tracker.getTotalUploadsSucceeded() > 0)); + } catch (Exception e) { + assert false; + } + indexDocs(1, randomIntBetween(1, 100)); // Mock indexShard.store().directory() @@ -554,7 +562,6 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn RecoverySettings recoverySettings = mock(RecoverySettings.class); when(recoverySettings.getMinRemoteSegmentMetadataFiles()).thenReturn(10); when(shard.getRecoverySettings()).thenReturn(recoverySettings); - RemoteSegmentTransferTracker tracker = remoteStoreStatsTrackerFactory.getRemoteSegmentTransferTracker(indexShard.shardId()); RemoteStoreRefreshListener refreshListener = new RemoteStoreRefreshListener(shard, emptyCheckpointPublisher, tracker); refreshListener.afterRefresh(true); return Tuple.tuple(refreshListener, remoteStoreStatsTrackerFactory); From 5dd4b61e83d9bbbdfd7fd490de4e8336bcc8e4f8 Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Wed, 17 Jan 2024 21:14:10 +0800 Subject: [PATCH 096/178] Remove ingest processor supports excluding fields (#10967) * Remove ingest processor supports field patterns and excluding fields Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Format some code Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Fix test failure Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Remove the code of field pattern Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Add skip version in rest test yml Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Make fields and exclude_fields mutually exclusive when constructing RemoveProcessor Signed-off-by: Gao Binlong <gbinlong@amazon.com> --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 1 + .../ingest/common/RemoveProcessor.java | 165 +++++++++++++----- .../common/RemoveProcessorFactoryTests.java | 38 ++-- .../ingest/common/RemoveProcessorTests.java | 46 +++++ .../test/ingest/290_remove_processor.yml | 40 +++++ .../opensearch/ingest/ConfigurationUtils.java | 8 + 6 files changed, 244 insertions(+), 54 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d1e970d097bfb..d083406d63518 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -103,6 +103,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255)) - Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) - Add cluster 
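
The flaky-test fix above replaces one-shot assertions with assertBusy so the test waits for the asynchronous remote-store upload to finish before asserting on tracker state. For readers unfamiliar with the helper, this is a rough standalone sketch of the polling idea behind OpenSearchTestCase#assertBusy; the timeout and backoff values are illustrative, not the framework's actual defaults.

```java
import java.util.concurrent.TimeUnit;

public class AssertBusySketch {
    // Re-run the assertion until it passes or the deadline expires, rethrowing
    // the most recent failure so the test reports a meaningful error.
    static void assertBusy(Runnable assertion, long timeout, TimeUnit unit) throws InterruptedException {
        long deadline = System.nanoTime() + unit.toNanos(timeout);
        long sleepMillis = 1;
        while (true) {
            try {
                assertion.run();
                return; // the asynchronous condition finally holds
            } catch (AssertionError e) {
                if (System.nanoTime() > deadline) throw e; // give up and surface the failure
                Thread.sleep(sleepMillis);
                sleepMillis = Math.min(sleepMillis * 2, 500); // simple exponential backoff
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        assertBusy(() -> {
            if (System.currentTimeMillis() - start < 200) throw new AssertionError("upload not finished yet");
        }, 10, TimeUnit.SECONDS);
        System.out.println("condition became true");
    }
}
```
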
state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670)) +- Remove ingest processor supports excluding fields ([#10967](https://github.com/opensearch-project/OpenSearch/pull/10967)) - [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275)) - [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753)) - [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853)) diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java index a48cfd87b78c3..d01dce02fca31 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java @@ -32,6 +32,7 @@ package org.opensearch.ingest.common; +import org.opensearch.common.Nullable; import org.opensearch.core.common.Strings; import org.opensearch.index.VersionType; import org.opensearch.ingest.AbstractProcessor; @@ -42,11 +43,15 @@ import org.opensearch.script.TemplateScript; import java.util.ArrayList; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; +import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException; + /** * Processor that removes existing fields. Nothing happens if the field is not present. */ @@ -55,11 +60,28 @@ public final class RemoveProcessor extends AbstractProcessor { public static final String TYPE = "remove"; private final List<TemplateScript.Factory> fields; + private final List<TemplateScript.Factory> excludeFields; private final boolean ignoreMissing; - RemoveProcessor(String tag, String description, List<TemplateScript.Factory> fields, boolean ignoreMissing) { + RemoveProcessor( + String tag, + String description, + @Nullable List<TemplateScript.Factory> fields, + @Nullable List<TemplateScript.Factory> excludeFields, + boolean ignoreMissing + ) { super(tag, description); - this.fields = new ArrayList<>(fields); + if (fields == null && excludeFields == null || fields != null && excludeFields != null) { + throw new IllegalArgumentException("either fields or excludeFields must be set"); + } + if (fields != null) { + this.fields = new ArrayList<>(fields); + this.excludeFields = null; + } else { + this.fields = null; + this.excludeFields = new ArrayList<>(excludeFields); + } + this.ignoreMissing = ignoreMissing; } @@ -67,42 +89,76 @@ public List<TemplateScript.Factory> getFields() { return fields; } + public List<TemplateScript.Factory> getExcludeFields() { + return excludeFields; + } + @Override public IngestDocument execute(IngestDocument document) { - fields.forEach(field -> { - String path = document.renderTemplate(field); - final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path); - if (fieldPathIsNullOrEmpty || document.hasField(path) == false) { - if (ignoreMissing) { - return; - } else if (fieldPathIsNullOrEmpty) { - throw new IllegalArgumentException("field path cannot be null nor empty"); - } else { - throw new IllegalArgumentException("field [" + path + "] doesn't exist"); + if (fields != null && !fields.isEmpty())
{ + fields.forEach(field -> { + String path = document.renderTemplate(field); + final boolean fieldPathIsNullOrEmpty = Strings.isNullOrEmpty(path); + if (fieldPathIsNullOrEmpty || document.hasField(path) == false) { + if (ignoreMissing) { + return; + } else if (fieldPathIsNullOrEmpty) { + throw new IllegalArgumentException("field path cannot be null nor empty"); + } else { + throw new IllegalArgumentException("field [" + path + "] doesn't exist"); + } } - } - // cannot remove _index, _version and _version_type. - if (path.equals(IngestDocument.Metadata.INDEX.getFieldName()) - || path.equals(IngestDocument.Metadata.VERSION.getFieldName()) - || path.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { - throw new IllegalArgumentException("cannot remove metadata field [" + path + "]"); - } - // removing _id is disallowed when there's an external version specified in the request - if (path.equals(IngestDocument.Metadata.ID.getFieldName()) - && document.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { - String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class); - if (!Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) { - Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class, true); - throw new IllegalArgumentException( - "cannot remove metadata field [_id] when specifying external version for the document, version: " - + version - + ", version_type: " - + versionType - ); + + // cannot remove _index, _version and _version_type. + if (path.equals(IngestDocument.Metadata.INDEX.getFieldName()) + || path.equals(IngestDocument.Metadata.VERSION.getFieldName()) + || path.equals(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { + throw new IllegalArgumentException("cannot remove metadata field [" + path + "]"); } + // removing _id is disallowed when there's an external version specified in the request + if (path.equals(IngestDocument.Metadata.ID.getFieldName()) + && document.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName())) { + String versionType = document.getFieldValue(IngestDocument.Metadata.VERSION_TYPE.getFieldName(), String.class); + if (!Objects.equals(versionType, VersionType.toString(VersionType.INTERNAL))) { + Long version = document.getFieldValue(IngestDocument.Metadata.VERSION.getFieldName(), Long.class, true); + throw new IllegalArgumentException( + "cannot remove metadata field [_id] when specifying external version for the document, version: " + + version + + ", version_type: " + + versionType + ); + } + } + document.removeField(path); + }); + } + + if (excludeFields != null && !excludeFields.isEmpty()) { + Set<String> excludeFieldSet = new HashSet<>(); + excludeFields.forEach(field -> { + String path = document.renderTemplate(field); + // ignore the empty or null field path + if (!Strings.isNullOrEmpty(path)) { + excludeFieldSet.add(path); + } + }); + + if (!excludeFieldSet.isEmpty()) { + Set<String> existingFields = new HashSet<>(document.getSourceAndMetadata().keySet()); + Set<String> metadataFields = document.getMetadata() + .keySet() + .stream() + .map(IngestDocument.Metadata::getFieldName) + .collect(Collectors.toSet()); + existingFields.forEach(field -> { + // ignore metadata fields such as _index, _id, etc. 
+ if (!metadataFields.contains(field) && !excludeFieldSet.contains(field)) { + document.removeField(field); + } + }); } - document.removeField(path); - }); + } + return document; } @@ -127,20 +183,41 @@ public RemoveProcessor create( Map<String, Object> config ) throws Exception { final List<String> fields = new ArrayList<>(); - final Object field = ConfigurationUtils.readObject(TYPE, processorTag, config, "field"); - if (field instanceof List) { - @SuppressWarnings("unchecked") - List<String> stringList = (List<String>) field; - fields.addAll(stringList); - } else { - fields.add((String) field); + final List<String> excludeFields = new ArrayList<>(); + final Object field = ConfigurationUtils.readOptionalObject(config, "field"); + final Object excludeField = ConfigurationUtils.readOptionalObject(config, "exclude_field"); + + if (field == null && excludeField == null || field != null && excludeField != null) { + throw newConfigurationException(TYPE, processorTag, "field", "either field or exclude_field must be set"); } - final List<TemplateScript.Factory> compiledTemplates = fields.stream() - .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", f, scriptService)) - .collect(Collectors.toList()); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new RemoveProcessor(processorTag, description, compiledTemplates, ignoreMissing); + + if (field != null) { + if (field instanceof List) { + @SuppressWarnings("unchecked") + List<String> stringList = (List<String>) field; + fields.addAll(stringList); + } else { + fields.add((String) field); + } + List<TemplateScript.Factory> fieldCompiledTemplates = fields.stream() + .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", f, scriptService)) + .collect(Collectors.toList()); + return new RemoveProcessor(processorTag, description, fieldCompiledTemplates, null, ignoreMissing); + } else { + if (excludeField instanceof List) { + @SuppressWarnings("unchecked") + List<String> stringList = (List<String>) excludeField; + excludeFields.addAll(stringList); + } else { + excludeFields.add((String) excludeField); + } + List<TemplateScript.Factory> excludeFieldCompiledTemplates = excludeFields.stream() + .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "exclude_field", f, scriptService)) + .collect(Collectors.toList()); + return new RemoveProcessor(processorTag, description, null, excludeFieldCompiledTemplates, ignoreMissing); + } } } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java index 66ca888a0d39f..179aef2feac0c 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java @@ -41,6 +41,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -79,16 +80,6 @@ public void testCreateMultipleFields() throws Exception { ); } - public void testCreateMissingField() throws Exception { - Map<String, Object> config = new HashMap<>(); - try { - factory.create(null, null, null, config); - fail("factory create should have failed"); - } catch (OpenSearchParseException e) { - assertThat(e.getMessage(), equalTo("[field] required
property is missing")); - } - } - public void testInvalidMustacheTemplate() throws Exception { RemoveProcessor.Factory factory = new RemoveProcessor.Factory(TestTemplateService.instance(true)); Map<String, Object> config = new HashMap<>(); @@ -98,4 +89,31 @@ public void testInvalidMustacheTemplate() throws Exception { assertThat(exception.getMessage(), equalTo("java.lang.RuntimeException: could not compile script")); assertThat(exception.getMetadata("opensearch.processor_tag").get(0), equalTo(processorTag)); } + + public void testCreateWithExcludeField() throws Exception { + Map<String, Object> config = new HashMap<>(); + String processorTag = randomAlphaOfLength(10); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> factory.create(null, processorTag, null, config) + ); + assertThat(exception.getMessage(), equalTo("[field] ether field or exclude_field must be set")); + + Map<String, Object> config2 = new HashMap<>(); + config2.put("field", "field1"); + config2.put("exclude_field", "field2"); + exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config2)); + assertThat(exception.getMessage(), equalTo("[field] ether field or exclude_field must be set")); + + Map<String, Object> config6 = new HashMap<>(); + config6.put("exclude_field", "exclude_field"); + RemoveProcessor removeProcessor = factory.create(null, processorTag, null, config6); + assertThat( + removeProcessor.getExcludeFields() + .stream() + .map(template -> template.newInstance(Collections.emptyMap()).execute()) + .collect(Collectors.toList()), + equalTo(List.of("exclude_field")) + ); + } } diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java index c138ad606d2e5..78a3d36124d45 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java @@ -38,8 +38,10 @@ import org.opensearch.ingest.Processor; import org.opensearch.ingest.RandomDocumentPicks; import org.opensearch.ingest.TestTemplateService; +import org.opensearch.script.TemplateScript; import org.opensearch.test.OpenSearchTestCase; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -57,12 +59,28 @@ public void testRemoveFields() throws Exception { randomAlphaOfLength(10), null, Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(field)), + null, false ); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(field), equalTo(false)); } + public void testRemoveByExcludeFields() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + ingestDocument.setFieldValue("foo_1", "value"); + ingestDocument.setFieldValue("foo_2", "value"); + ingestDocument.setFieldValue("foo_3", "value"); + List<TemplateScript.Factory> excludeFields = new ArrayList<>(); + excludeFields.add(new TestTemplateService.MockTemplateScript.Factory("foo_1")); + excludeFields.add(new TestTemplateService.MockTemplateScript.Factory("foo_2")); + Processor processor = new RemoveProcessor(randomAlphaOfLength(10), null, null, excludeFields, false); + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField("foo_1"), equalTo(true)); + assertThat(ingestDocument.hasField("foo_2"), 
equalTo(true)); + assertThat(ingestDocument.hasField("foo_3"), equalTo(false)); + } + public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); @@ -183,6 +201,34 @@ public void testRemoveMetadataField() throws Exception { } } + public void testCreateRemoveProcessorWithBothFieldsAndExcludeFields() throws Exception { + assertThrows( + "either fields or excludeFields must be set", + IllegalArgumentException.class, + () -> new RemoveProcessor(randomAlphaOfLength(10), null, null, null, false) + ); + + final List<TemplateScript.Factory> fields; + if (randomBoolean()) { + fields = new ArrayList<>(); + } else { + fields = List.of(new TestTemplateService.MockTemplateScript.Factory("foo_1")); + } + + final List<TemplateScript.Factory> excludeFields; + if (randomBoolean()) { + excludeFields = new ArrayList<>(); + } else { + excludeFields = List.of(new TestTemplateService.MockTemplateScript.Factory("foo_2")); + } + + assertThrows( + "either fields or excludeFields must be set", + IllegalArgumentException.class, + () -> new RemoveProcessor(randomAlphaOfLength(10), null, fields, excludeFields, false) + ); + } + public void testRemoveDocumentId() throws Exception { Map<String, Object> config = new HashMap<>(); config.put("field", IngestDocument.Metadata.ID.getFieldName()); diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml index 6668b468f8edc..e120a865052b0 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/290_remove_processor.yml @@ -319,3 +319,43 @@ teardown: } - match: { docs.0.error.type: "illegal_argument_exception" } - match: { docs.0.error.reason: "cannot remove metadata field [_id] when specifying external version for the document, version: 1, version_type: external_gte" } + +# Related issue: https://github.com/opensearch-project/OpenSearch/issues/1578 +--- +"Test remove processor with exclude_field": + - skip: + version: " - 2.11.99" + reason: "exclude_field is introduced in 2.12" + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove" : { + "exclude_field": "bar" + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: { + foo1: "bar", + foo2: "bar", + bar: "zoo", + zoo: "bar" + } + + - do: + get: + index: test + id: 1 + - match: { _source: { bar: "zoo"}}
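
The exclude_field behavior exercised above reduces to a set difference over the document's top-level keys: everything that is neither metadata nor explicitly kept gets removed. A minimal standalone sketch of that keep-only logic follows; it works on a flat map instead of an IngestDocument, and the metadata set shown is illustrative rather than the exact set the processor consults.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class ExcludeFieldSketch {
    // Mirrors the exclude branch of RemoveProcessor#execute: drop every source
    // field that is neither a metadata field nor in the exclude (keep) set.
    static void removeAllExcept(Map<String, Object> doc, Set<String> keep) {
        Set<String> metadata = Set.of("_index", "_id", "_routing", "_version", "_version_type");
        doc.keySet().removeIf(field -> !metadata.contains(field) && !keep.contains(field));
    }

    public static void main(String[] args) {
        Map<String, Object> doc = new HashMap<>(
            Map.of("foo1", "bar", "foo2", "bar", "bar", "zoo", "zoo", "bar", "_id", "1"));
        removeAllExcept(doc, Set.of("bar"));
        System.out.println(doc); // only "bar" and the metadata field "_id" survive
    }
}
```
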
diff --git a/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java b/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java index 5185b740d90cb..a2c2137130587 100644 --- a/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java +++ b/server/src/main/java/org/opensearch/ingest/ConfigurationUtils.java @@ -387,6 +387,7 @@ private static <T> Map<String, T> readMap(String processorType, String processor /** * Returns and removes the specified property as an {@link Object} from the specified configuration map. + * If the property is missing an {@link OpenSearchParseException} is thrown */ public static Object readObject(String processorType, String processorTag, Map<String, Object> configuration, String propertyName) { Object value = configuration.remove(propertyName); @@ -396,6 +397,13 @@ public static Object readObject(String processorType, String processorTag, Map<S return value; } + /** + * Returns and removes the specified property as an {@link Object} from the specified configuration map. + */ + public static Object readOptionalObject(Map<String, Object> configuration, String propertyName) { + return configuration.remove(propertyName); + } + public static OpenSearchException newConfigurationException( String processorType, String processorTag, From d5a6f99d9c8c3e34628e4807d351001647f235c5 Mon Sep 17 00:00:00 2001 From: bowenlan-amzn <bowenlan23@gmail.com> Date: Wed, 17 Jan 2024 09:29:52 -0800 Subject: [PATCH 097/178] Apply the fast filter optimization to composite aggregation (#11505) We can do efficient DateHistogram aggregation under composite aggregations. --------- Signed-off-by: bowenlan-amzn <bowenlan23@gmail.com> --- CHANGELOG.md | 1 + .../bucket/FastFilterRewriteHelper.java | 385 ++++++++++++++++++ .../bucket/composite/CompositeAggregator.java | 98 ++++- .../bucket/composite/CompositeKey.java | 6 +- .../CompositeValuesCollectorQueue.java | 30 +- .../CompositeValuesSourceConfig.java | 2 +- .../DateHistogramValuesSourceBuilder.java | 2 +- .../bucket/composite/InternalComposite.java | 6 +- .../composite/PointsSortedDocsProducer.java | 6 +- .../composite/RoundingValuesSource.java | 24 +- .../AutoDateHistogramAggregator.java | 67 ++- .../histogram/DateHistogramAggregator.java | 60 ++- .../bucket/histogram/FilterRewriteHelper.java | 259 ------------ .../composite/CompositeAggregatorTests.java | 17 +- 14 files changed, 585 insertions(+), 378 deletions(-) create mode 100644 server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java delete mode 100644 server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/FilterRewriteHelper.java diff --git a/CHANGELOG.md b/CHANGELOG.md index d083406d63518..ea96035807276 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -188,6 +188,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Performance improvement for MultiTerm Queries on Keyword fields ([#7057](https://github.com/opensearch-project/OpenSearch/issues/7057)) - Refactor common parts from the Rounding class into a separate 'round' package ([#11023](https://github.com/opensearch-project/OpenSearch/issues/11023)) - Performance improvement for date histogram aggregations without sub-aggregations ([#11083](https://github.com/opensearch-project/OpenSearch/pull/11083)) +- Apply the fast filter optimization to composite aggregation of date histogram source ([#11505](https://github.com/opensearch-project/OpenSearch/pull/11505)) - Disable concurrent aggs for Diversified Sampler and Sampler aggs ([#11087](https://github.com/opensearch-project/OpenSearch/issues/11087)) - Made leader/follower check timeout setting dynamic ([#10528](https://github.com/opensearch-project/OpenSearch/pull/10528)) - Improved performance of numeric exact-match queries ([#11209](https://github.com/opensearch-project/OpenSearch/pull/11209)) diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java new file mode
100644 index 0000000000000..f377287d0b3bd --- /dev/null +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/FastFilterRewriteHelper.java @@ -0,0 +1,385 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.IndexOrDocValuesQuery; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.PointRangeQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Weight; +import org.apache.lucene.util.NumericUtils; +import org.opensearch.common.CheckedFunction; +import org.opensearch.common.Rounding; +import org.opensearch.common.lucene.search.function.FunctionScoreQuery; +import org.opensearch.index.mapper.DateFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.query.DateRangeIncludingNowQuery; +import org.opensearch.search.DocValueFormat; +import org.opensearch.search.aggregations.bucket.composite.CompositeKey; +import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceConfig; +import org.opensearch.search.aggregations.bucket.composite.RoundingValuesSource; +import org.opensearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.OptionalLong; +import java.util.function.BiConsumer; +import java.util.function.Function; +import java.util.function.Supplier; + +/** + * Utility class to help rewrite aggregations into filters. + * Instead of the aggregation collecting documents one by one, a filter may count all matching documents in one pass. + * <p> + * Currently supported rewrite: + * <ul> + * <li> date histogram: date range filter.
+ * Applied: DateHistogramAggregator, AutoDateHistogramAggregator, CompositeAggregator </li> + * </ul> + * + * @opensearch.internal + */ +public final class FastFilterRewriteHelper { + + private FastFilterRewriteHelper() {} + + private static final int MAX_NUM_FILTER_BUCKETS = 1024; + private static final Map<Class<?>, Function<Query, Query>> queryWrappers; + + // Initialize the wrapper map for unwrapping the query + static { + queryWrappers = new HashMap<>(); + queryWrappers.put(ConstantScoreQuery.class, q -> ((ConstantScoreQuery) q).getQuery()); + queryWrappers.put(FunctionScoreQuery.class, q -> ((FunctionScoreQuery) q).getSubQuery()); + queryWrappers.put(DateRangeIncludingNowQuery.class, q -> ((DateRangeIncludingNowQuery) q).getQuery()); + queryWrappers.put(IndexOrDocValuesQuery.class, q -> ((IndexOrDocValuesQuery) q).getIndexQuery()); + } + + /** + * Recursively unwraps query into the concrete form + * for applying the optimization + */ + private static Query unwrapIntoConcreteQuery(Query query) { + while (queryWrappers.containsKey(query.getClass())) { + query = queryWrappers.get(query.getClass()).apply(query); + } + + return query; + } + + /** + * Finds the min and max bounds of field values for the shard + */ + private static long[] getIndexBounds(final SearchContext context, final String fieldName) throws IOException { + final List<LeafReaderContext> leaves = context.searcher().getIndexReader().leaves(); + long min = Long.MAX_VALUE, max = Long.MIN_VALUE; + // Since the query does not specify bounds for aggregation, we can + // build the global min/max from local min/max within each segment + for (LeafReaderContext leaf : leaves) { + final PointValues values = leaf.reader().getPointValues(fieldName); + if (values != null) { + min = Math.min(min, NumericUtils.sortableBytesToLong(values.getMinPackedValue(), 0)); + max = Math.max(max, NumericUtils.sortableBytesToLong(values.getMaxPackedValue(), 0)); + } + } + + if (min == Long.MAX_VALUE || max == Long.MIN_VALUE) return null; + + return new long[] { min, max }; + } + + /** + * This method also acts as a pre-condition check for the optimization, + * returns null if the optimization cannot be applied + */ + public static long[] getAggregationBounds(final SearchContext context, final String fieldName) throws IOException { + final Query cq = unwrapIntoConcreteQuery(context.query()); + final long[] indexBounds = getIndexBounds(context, fieldName); + if (cq instanceof PointRangeQuery) { + final PointRangeQuery prq = (PointRangeQuery) cq; + // Ensure that the query and aggregation are on the same field + if (prq.getField().equals(fieldName)) { + return new long[] { + // Minimum bound for aggregation is the max between query and global + Math.max(NumericUtils.sortableBytesToLong(prq.getLowerPoint(), 0), indexBounds[0]), + // Maximum bound for aggregation is the min between query and global + Math.min(NumericUtils.sortableBytesToLong(prq.getUpperPoint(), 0), indexBounds[1]) }; + } + } else if (cq instanceof MatchAllDocsQuery) { + return indexBounds; + } + // Check if the top-level query (which may be a PRQ on another field) is functionally match-all + Weight weight = context.searcher().createWeight(context.query(), ScoreMode.COMPLETE_NO_SCORES, 1f); + for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) { + if (weight.count(ctx) != ctx.reader().numDocs()) { + return null; + } + } + return indexBounds; + } + + /** + * Creates the date range filters for aggregations using the interval, min/max + * bounds and the rounding 
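
getIndexBounds above derives the shard-wide min/max from each segment's point index (the BKD tree metadata) rather than visiting any documents, which is what keeps the pre-check cheap. The following self-contained Lucene sketch shows that lookup in isolation; the field name and sample timestamps are invented for the example.

```java
import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.util.NumericUtils;

public class PointBoundsDemo {
    public static void main(String[] args) throws Exception {
        ByteBuffersDirectory dir = new ByteBuffersDirectory();
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
            for (long ts : new long[] { 1700000000000L, 1700000300000L, 1700000900000L }) {
                Document doc = new Document();
                doc.add(new LongPoint("timestamp", ts));
                writer.addDocument(doc);
            }
        }
        long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            // Min and max come from per-segment point metadata; no document is read.
            for (LeafReaderContext leaf : reader.leaves()) {
                PointValues values = leaf.reader().getPointValues("timestamp");
                if (values == null) continue;
                min = Math.min(min, NumericUtils.sortableBytesToLong(values.getMinPackedValue(), 0));
                max = Math.max(max, NumericUtils.sortableBytesToLong(values.getMaxPackedValue(), 0));
            }
        }
        System.out.println("bounds: [" + min + ", " + max + "]");
    }
}
```
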
values + */ + private static Weight[] createFilterForAggregations( + final SearchContext context, + final long interval, + final Rounding.Prepared preparedRounding, + final String field, + final DateFieldMapper.DateFieldType fieldType, + long low, + final long high + ) throws IOException { + // Calculate the number of buckets using range and interval + long roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); + long prevRounded = roundedLow; + int bucketCount = 0; + while (roundedLow <= fieldType.convertNanosToMillis(high)) { + bucketCount++; + if (bucketCount > MAX_NUM_FILTER_BUCKETS) return null; + // Below rounding is needed as the interval could return in + // non-rounded values for something like calendar month + roundedLow = preparedRounding.round(roundedLow + interval); + if (prevRounded == roundedLow) break; // prevents getting into an infinite loop + prevRounded = roundedLow; + } + + Weight[] filters = null; + if (bucketCount > 0) { + filters = new Weight[bucketCount]; + roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); + + int i = 0; + while (i < bucketCount) { + // Calculate the lower bucket bound + final byte[] lower = new byte[8]; + NumericUtils.longToSortableBytes(i == 0 ? low : fieldType.convertRoundedMillisToNanos(roundedLow), lower, 0); + + // Calculate the upper bucket bound + roundedLow = preparedRounding.round(roundedLow + interval); + final byte[] upper = new byte[8]; + NumericUtils.longToSortableBytes(i + 1 == bucketCount ? high : + // Subtract -1 if the minimum is roundedLow as roundedLow itself + // is included in the next bucket + fieldType.convertRoundedMillisToNanos(roundedLow) - 1, upper, 0); + + filters[i++] = context.searcher().createWeight(new PointRangeQuery(field, lower, upper, 1) { + @Override + protected String toString(int dimension, byte[] value) { + return null; + } + }, ScoreMode.COMPLETE_NO_SCORES, 1); + } + } + + return filters; + } + + /** + * Context object to do fast filter optimization + */ + public static class FastFilterContext { + private Weight[] filters = null; + public AggregationType aggregationType; + + public FastFilterContext() {} + + private void setFilters(Weight[] filters) { + this.filters = filters; + } + + public void setAggregationType(AggregationType aggregationType) { + this.aggregationType = aggregationType; + } + + public boolean isRewriteable(final Object parent, final int subAggLength) { + return aggregationType.isRewriteable(parent, subAggLength); + } + + /** + * This filter build method is for date histogram aggregation type + * + * @param computeBounds get the lower and upper bound of the field in a shard search + * @param roundingFunction produce Rounding that contains interval of date range. 
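
createFilterForAggregations above makes two passes: one that counts buckets by repeatedly rounding low plus interval (capped at MAX_NUM_FILTER_BUCKETS), and one that emits a [lower, upper] range per bucket, pinning the first bucket to the actual minimum, the last to the actual maximum, and subtracting one from inner upper bounds so adjacent ranges never overlap. Here is that bucket arithmetic on its own, with a fixed-width rounding standing in for Rounding.Prepared; the numbers are illustrative.

```java
public class BucketRangeSketch {
    public static void main(String[] args) {
        long low = 3, high = 27, interval = 10;
        long roundedLow = (low / interval) * interval; // stands in for Rounding.Prepared#round

        // Pass 1: size the filter array, one bucket per rounded interval step.
        int bucketCount = 0;
        for (long b = roundedLow; b <= high; b += interval) bucketCount++;

        // Pass 2: emit the per-bucket bounds exactly as the range filters would use them.
        long lowerBound = low; // first bucket starts at the actual minimum
        for (int i = 0; i < bucketCount; i++) {
            long nextRounded = roundedLow + (i + 1) * interval;
            // last bucket ends at the actual maximum; inner buckets end one unit
            // before the next bucket's start so the ranges never overlap
            long upperBound = (i == bucketCount - 1) ? high : nextRounded - 1;
            System.out.println("bucket " + i + ": [" + lowerBound + ", " + upperBound + "]");
            lowerBound = nextRounded;
        }
        // prints: bucket 0: [3, 9], bucket 1: [10, 19], bucket 2: [20, 27]
    }
}
```
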
+ * Rounding is computed dynamically using the bounds in AutoDateHistogram + * @param preparedRoundingSupplier produce PreparedRounding to round values at call-time + */ + public void buildFastFilter( + SearchContext context, + CheckedFunction<DateHistogramAggregationType, long[], IOException> computeBounds, + Function<long[], Rounding> roundingFunction, + Supplier<Rounding.Prepared> preparedRoundingSupplier + ) throws IOException { + assert this.aggregationType instanceof DateHistogramAggregationType; + DateHistogramAggregationType aggregationType = (DateHistogramAggregationType) this.aggregationType; + DateFieldMapper.DateFieldType fieldType = aggregationType.getFieldType(); + final long[] bounds = computeBounds.apply(aggregationType); + if (bounds == null) return; + + final Rounding rounding = roundingFunction.apply(bounds); + final OptionalLong intervalOpt = Rounding.getInterval(rounding); + if (intervalOpt.isEmpty()) return; + final long interval = intervalOpt.getAsLong(); + + // afterKey is the last bucket key in previous response, while the bucket key + // is the start of the bucket values, so add the interval + if (aggregationType instanceof CompositeAggregationType && ((CompositeAggregationType) aggregationType).afterKey != -1) { + bounds[0] = ((CompositeAggregationType) aggregationType).afterKey + interval; + } + + final Weight[] filters = FastFilterRewriteHelper.createFilterForAggregations( + context, + interval, + preparedRoundingSupplier.get(), + fieldType.name(), + fieldType, + bounds[0], + bounds[1] + ); + this.setFilters(filters); + } + } + + /** + * Different types have different pre-conditions, filter building logic, etc. + */ + public interface AggregationType { + boolean isRewriteable(Object parent, int subAggLength); + } + + /** + * For date histogram aggregation + */ + public static class DateHistogramAggregationType implements AggregationType { + private final MappedFieldType fieldType; + private final boolean missing; + private final boolean hasScript; + + public DateHistogramAggregationType(MappedFieldType fieldType, boolean missing, boolean hasScript) { + this.fieldType = fieldType; + this.missing = missing; + this.hasScript = hasScript; + } + + @Override + public boolean isRewriteable(Object parent, int subAggLength) { + if (parent == null && subAggLength == 0 && !missing && !hasScript) { + return fieldType != null && fieldType instanceof DateFieldMapper.DateFieldType; + } + return false; + } + + public DateFieldMapper.DateFieldType getFieldType() { + assert fieldType instanceof DateFieldMapper.DateFieldType; + return (DateFieldMapper.DateFieldType) fieldType; + } + } + + /** + * For composite aggregation with date histogram as a source + */ + public static class CompositeAggregationType extends DateHistogramAggregationType { + private final RoundingValuesSource valuesSource; + private long afterKey = -1L; + private final int size; + + public CompositeAggregationType( + CompositeValuesSourceConfig[] sourceConfigs, + CompositeKey rawAfterKey, + List<DocValueFormat> formats, + int size + ) { + super(sourceConfigs[0].fieldType(), sourceConfigs[0].missingBucket(), sourceConfigs[0].hasScript()); + this.valuesSource = (RoundingValuesSource) sourceConfigs[0].valuesSource(); + this.size = size; + if (rawAfterKey != null) { + assert rawAfterKey.size() == 1 && formats.size() == 1; + this.afterKey = formats.get(0).parseLong(rawAfterKey.get(0).toString(), false, () -> { + throw new IllegalArgumentException("now() is not supported in [after] key"); + }); + } + } + + public 
Rounding getRounding() { + return valuesSource.getRounding(); + } + + public Rounding.Prepared getRoundingPreparer() { + return valuesSource.getPreparedRounding(); + } + } + + public static boolean isCompositeAggRewriteable(CompositeValuesSourceConfig[] sourceConfigs) { + return sourceConfigs.length == 1 && sourceConfigs[0].valuesSource() instanceof RoundingValuesSource; + } + + public static long getBucketOrd(long bucketOrd) { + if (bucketOrd < 0) { // already seen + bucketOrd = -1 - bucketOrd; + } + + return bucketOrd; + } + + /** + * This is executed for each segment by passing the leaf reader context + * + * @param incrementDocCount takes in the bucket key value and the bucket count + */ + public static boolean tryFastFilterAggregation( + final LeafReaderContext ctx, + FastFilterContext fastFilterContext, + final BiConsumer<Long, Integer> incrementDocCount + ) throws IOException { + if (fastFilterContext == null) return false; + if (fastFilterContext.filters == null) return false; + + final Weight[] filters = fastFilterContext.filters; + final int[] counts = new int[filters.length]; + int i; + for (i = 0; i < filters.length; i++) { + counts[i] = filters[i].count(ctx); + if (counts[i] == -1) { + // Cannot use the optimization if any of the counts + // is -1 indicating the segment might have deleted documents + return false; + } + } + + int s = 0; + int size = Integer.MAX_VALUE; + for (i = 0; i < filters.length; i++) { + if (counts[i] > 0) { + long bucketKey = i; // the index of filters is the key for filters aggregation + if (fastFilterContext.aggregationType instanceof DateHistogramAggregationType) { + final DateFieldMapper.DateFieldType fieldType = ((DateHistogramAggregationType) fastFilterContext.aggregationType) + .getFieldType(); + bucketKey = fieldType.convertNanosToMillis( + NumericUtils.sortableBytesToLong(((PointRangeQuery) filters[i].getQuery()).getLowerPoint(), 0) + ); + if (fastFilterContext.aggregationType instanceof CompositeAggregationType) { + size = ((CompositeAggregationType) fastFilterContext.aggregationType).size; + } + } + incrementDocCount.accept(bucketKey, counts[i]); + s++; + if (s > size) return true; + } + } + + return true; + } +} diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java index 317c2a357bac5..822b8a6c4b118 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -56,7 +56,9 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.search.comparators.LongComparator; import org.apache.lucene.util.Bits; +import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.RoaringDocIdSet; +import org.opensearch.common.Rounding; import org.opensearch.common.lease.Releasables; import org.opensearch.index.IndexSortConfig; import org.opensearch.lucene.queries.SearchAfterSortedDocQuery; @@ -71,7 +73,9 @@ import org.opensearch.search.aggregations.MultiBucketCollector; import org.opensearch.search.aggregations.MultiBucketConsumerService; import org.opensearch.search.aggregations.bucket.BucketsAggregator; +import org.opensearch.search.aggregations.bucket.FastFilterRewriteHelper; import org.opensearch.search.aggregations.bucket.missing.MissingOrder; +import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; import 
org.opensearch.search.internal.SearchContext; import org.opensearch.search.searchafter.SearchAfterBuilder; import org.opensearch.search.sort.SortAndFormats; @@ -80,6 +84,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.LongUnaryOperator; @@ -111,6 +116,10 @@ final class CompositeAggregator extends BucketsAggregator { private boolean earlyTerminated; + private final FastFilterRewriteHelper.FastFilterContext fastFilterContext; + private LongKeyedBucketOrds bucketOrds = null; + private Rounding.Prepared preparedRounding = null; + CompositeAggregator( String name, AggregatorFactories factories, @@ -154,12 +163,33 @@ final class CompositeAggregator extends BucketsAggregator { } this.queue = new CompositeValuesCollectorQueue(context.bigArrays(), sources, size, rawAfterKey); this.rawAfterKey = rawAfterKey; + + fastFilterContext = new FastFilterRewriteHelper.FastFilterContext(); + if (!FastFilterRewriteHelper.isCompositeAggRewriteable(sourceConfigs)) return; + fastFilterContext.setAggregationType( + new FastFilterRewriteHelper.CompositeAggregationType(sourceConfigs, rawAfterKey, formats, size) + ); + if (fastFilterContext.isRewriteable(parent, subAggregators.length)) { + // bucketOrds is the data structure for saving date histogram results + bucketOrds = LongKeyedBucketOrds.build(context.bigArrays(), CardinalityUpperBound.ONE); + // Currently the filter rewrite is only supported for date histograms + FastFilterRewriteHelper.CompositeAggregationType aggregationType = + (FastFilterRewriteHelper.CompositeAggregationType) fastFilterContext.aggregationType; + preparedRounding = aggregationType.getRoundingPreparer(); + fastFilterContext.buildFastFilter( + context, + fc -> FastFilterRewriteHelper.getAggregationBounds(context, fc.getFieldType().name()), + x -> aggregationType.getRounding(), + () -> preparedRounding + ); + } } @Override protected void doClose() { try { Releasables.close(queue); + Releasables.close(bucketOrds); } finally { Releasables.close(sources); } @@ -187,12 +217,14 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I } int num = Math.min(size, queue.size()); - final InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num]; + InternalComposite.InternalBucket[] buckets = new InternalComposite.InternalBucket[num]; + long[] bucketOrdsToCollect = new long[queue.size()]; for (int i = 0; i < queue.size(); i++) { bucketOrdsToCollect[i] = i; } InternalAggregations[] subAggsForBuckets = buildSubAggsForBuckets(bucketOrdsToCollect); + while (queue.size() > 0) { int slot = queue.pop(); CompositeKey key = queue.toCompositeKey(slot); @@ -208,6 +240,43 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I aggs ); } + + // Build results from fast filters optimization + if (bucketOrds != null) { + // CompositeKey is the value of bucket key + final Map<CompositeKey, InternalComposite.InternalBucket> bucketMap = new HashMap<>(); + // Some segments may not be optimized, so buckets may contain results from the queue. 
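The merge performed by the loop that follows can be pictured in isolation. A minimal sketch, assuming plain maps of bucket key to doc count instead of the actual InternalBucket types (all names here are illustrative, not the OpenSearch API):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class BucketMergeSketch {
    // Combines counts from the optimized (filter-counting) path with buckets from
    // the normal queue path; overlapping keys add their doc counts together.
    static Map<Long, Long> merge(Map<Long, Long> queueBuckets, Map<Long, Long> optimizedCounts) {
        Map<Long, Long> merged = new HashMap<>(queueBuckets);
        optimizedCounts.forEach((key, count) -> merged.merge(key, count, Long::sum));
        return merged;
    }

    public static void main(String[] args) {
        Map<Long, Long> fromQueue = Map.of(1000L, 2L, 2000L, 1L);
        Map<Long, Long> fromFilters = Map.of(2000L, 3L, 3000L, 4L);
        // keys are sorted afterwards, mirroring the introSort on bucket keys below
        List<Long> keys = new ArrayList<>(merge(fromQueue, fromFilters).keySet());
        keys.sort(Long::compare);
        System.out.println(keys); // [1000, 2000, 3000]
    }
}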
+            for (InternalComposite.InternalBucket internalBucket : buckets) {
+                bucketMap.put(internalBucket.getRawKey(), internalBucket);
+            }
+            // Loop over the buckets in the bucketOrds, and populate the map accordingly
+            LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(0);
+            while (ordsEnum.next()) {
+                Long bucketKeyValue = ordsEnum.value();
+                CompositeKey key = new CompositeKey(bucketKeyValue);
+                if (bucketMap.containsKey(key)) {
+                    long docCount = bucketDocCount(ordsEnum.ord()) + bucketMap.get(key).getDocCount();
+                    bucketMap.get(key).setDocCount(docCount);
+                } else {
+                    InternalComposite.InternalBucket bucket = new InternalComposite.InternalBucket(
+                        sourceNames,
+                        formats,
+                        key,
+                        reverseMuls,
+                        missingOrders,
+                        bucketDocCount(ordsEnum.ord()),
+                        buildEmptySubAggregations()
+                    );
+                    bucketMap.put(key, bucket);
+                }
+            }
+            // since a map is not a sorted structure, sort it before transforming back to buckets
+            List<InternalComposite.InternalBucket> bucketList = new ArrayList<>(bucketMap.values());
+            CollectionUtil.introSort(bucketList, InternalComposite.InternalBucket::compareKey);
+            buckets = bucketList.subList(0, Math.min(size, bucketList.size())).toArray(InternalComposite.InternalBucket[]::new);
+            num = buckets.length;
+        }
+
         CompositeKey lastBucket = num > 0 ? buckets[num - 1].getRawKey() : null;
         return new InternalAggregation[] {
             new InternalComposite(
@@ -296,7 +365,7 @@ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException
         if (indexSortField.getReverse() != (source.reverseMul == -1)) {
             if (i == 0) {
-                // the leading index sort matches the leading source field but the order is reversed
+                // the leading index sort matches the leading source field, but the order is reversed,
                 // so we don't check the other sources.
                 return new Sort(indexSortField);
             }
@@ -304,8 +373,8 @@ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException
             sortFields.add(indexSortField);
             if (sourceConfig.valuesSource() instanceof RoundingValuesSource) {
-                // the rounding "squashes" many values together, that breaks the ordering of sub-values
-                // so we ignore subsequent source even if they match the index sort.
+                // the rounding "squashes" many values together, which breaks the ordering of sub-values,
+                // so we ignore the subsequent sources even if they match the index sort.
                 break;
             }
         }
@@ -448,6 +517,16 @@ private void processLeafFromQuery(LeafReaderContext ctx, Sort indexSortPrefix) t

     @Override
     protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
+        boolean optimized = FastFilterRewriteHelper.tryFastFilterAggregation(
+            ctx,
+            fastFilterContext,
+            (key, count) -> incrementBucketDocCount(
+                FastFilterRewriteHelper.getBucketOrd(bucketOrds.add(0, preparedRounding.round(key))),
+                count
+            )
+        );
+        if (optimized) throw new CollectionTerminatedException();
+
         finishLeaf();

         boolean fillDocIdSet = deferredCollectors != NO_OP_COLLECTOR;
@@ -477,9 +556,10 @@ protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucket
             docIdSetBuilder = new RoaringDocIdSet.Builder(ctx.reader().maxDoc());
         }
         if (rawAfterKey != null && sortPrefixLen > 0) {
-            // We have an after key and index sort is applicable so we jump directly to the doc
-            // that is after the index sort prefix using the rawAfterKey and we start collecting
-            // document from there.
+            // We have an after key and index sort is applicable, so we jump directly to the doc
+            // after the index sort prefix using the rawAfterKey and we start collecting
+            // documents from there.
+            assert indexSortPrefix != null;
             processLeafFromQuery(ctx, indexSortPrefix);
             throw new CollectionTerminatedException();
         } else {
@@ -507,6 +587,8 @@ public void collect(int doc, long bucket) throws IOException {
                 try {
                     long docCount = docCountProvider.getDocCount(doc);
                     if (queue.addIfCompetitive(indexSortPrefix, docCount)) {
+                        // one doc may contain multiple values; we iterate over them and collect one by one,
+                        // so the same doc can appear multiple times here
                         if (builder != null && lastDoc != doc) {
                             builder.add(doc);
                             lastDoc = doc;
@@ -569,7 +651,7 @@ private LeafBucketCollector getSecondPassCollector(LeafBucketCollector subCollec
         @Override
         public void collect(int doc, long zeroBucket) throws IOException {
             assert zeroBucket == 0;
-            Integer slot = queue.compareCurrent();
+            Integer slot = queue.getCurrentSlot();
             if (slot != null) {
                 // The candidate key is a top bucket.
                 // We can defer the collection of this document/bucket to the sub collector
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java
index 5ddeb22d33a6f..338ebdc66eef7 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeKey.java
@@ -44,7 +44,7 @@
  *
  * @opensearch.internal
  */
-class CompositeKey implements Writeable {
+public class CompositeKey implements Writeable {
     private final Comparable[] values;

     CompositeKey(Comparable... values) {
@@ -64,11 +64,11 @@ Comparable[] values() {
         return values;
     }

-    int size() {
+    public int size() {
         return values.length;
     }

-    Comparable get(int pos) {
+    public Comparable get(int pos) {
         assert pos < values.length;
         return values[pos];
     }
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java
index 6ee1682a7b196..2c4d451322bca 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueue.java
@@ -47,6 +47,8 @@

 /**
  * A specialized {@link PriorityQueue} implementation for composite buckets.
+ * Think of it as a max heap that holds, in order, the slots of the top N smallest buckets.
+ * Each slot holds the values of the composite bucket key it represents.
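The inverted-comparator trick this Javadoc describes (a max heap whose head is the largest kept key, evicted whenever a smaller candidate arrives) can be sketched with a plain java.util.PriorityQueue. This is an illustration only, not the actual slot-based implementation:

import java.util.Comparator;
import java.util.PriorityQueue;

class TopNSmallestSketch {
    public static void main(String[] args) {
        int n = 3;
        // Reverse the natural order so the head of the heap is the LARGEST kept key,
        // i.e. the one to evict when a smaller (more competitive) key arrives.
        PriorityQueue<Long> heap = new PriorityQueue<>(Comparator.reverseOrder());
        for (long key : new long[] { 42, 7, 99, 13, 5 }) {
            if (heap.size() < n) {
                heap.offer(key);
            } else if (key < heap.peek()) { // competitive: smaller than the current max
                heap.poll();
                heap.offer(key);
            } // otherwise not competitive, skip it
        }
        System.out.println(heap); // holds the 3 smallest keys: 5, 7, 13 (in heap order)
    }
}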
 *
 * @opensearch.internal
 */
@@ -77,7 +79,7 @@ public int hashCode() {

     private final BigArrays bigArrays;
     private final int maxSize;
-    private final Map<Slot, Integer> map;
+    private final Map<Slot, Integer> map; // to quickly find the slot for a value
     private final SingleDimensionValuesSource<?>[] arrays;

     private LongArray docCounts;
@@ -108,7 +110,7 @@ public int hashCode() {

     @Override
     protected boolean lessThan(Integer a, Integer b) {
-        return compare(a, b) > 0;
+        return compare(a, b) > 0; // max heap
     }

     /**
@@ -119,10 +121,10 @@ boolean isFull() {
     }

     /**
-     * Compares the current candidate with the values in the queue and returns
+     * Tries to get the slot of the current/candidate values in the queue and returns
      * the slot if the candidate is already in the queue or null if the candidate is not present.
      */
-    Integer compareCurrent() {
+    Integer getCurrentSlot() {
         return map.get(new Slot(CANDIDATE_SLOT));
     }

@@ -281,32 +283,34 @@ boolean addIfCompetitive(long inc) {
      */
     boolean addIfCompetitive(int indexSortSourcePrefix, long inc) {
         // checks if the candidate key is competitive
-        Integer topSlot = compareCurrent();
-        if (topSlot != null) {
+        Integer curSlot = getCurrentSlot();
+        if (curSlot != null) {
             // this key is already in the top N, skip it
-            docCounts.increment(topSlot, inc);
+            docCounts.increment(curSlot, inc);
             return true;
         }
+
         if (afterKeyIsSet) {
             int cmp = compareCurrentWithAfter();
             if (cmp <= 0) {
                 if (indexSortSourcePrefix < 0 && cmp == indexSortSourcePrefix) {
-                    // the leading index sort is in the reverse order of the leading source
+                    // the leading index sort and the leading source order are both reversed,
                     // so we can early terminate when we reach a document that is smaller
                     // than the after key (collected on a previous page).
                     throw new CollectionTerminatedException();
                 }
-                // key was collected on a previous page, skip it (>= afterKey).
+                // the key was collected on a previous page, skip it.
                 return false;
             }
         }
+
+        // when the heap is full, check if the candidate key is larger than the max heap top
         if (size() >= maxSize) {
-            // the tree map is full, check if the candidate key should be kept
             int cmp = compare(CANDIDATE_SLOT, top());
             if (cmp > 0) {
                 if (cmp <= indexSortSourcePrefix) {
-                    // index sort guarantees that there is no key greater or equal than the
-                    // current one in the subsequent documents so we can early terminate.
+                    // index sort guarantees the following documents will have a key larger than the current candidate,
+                    // so we can early terminate.
                     throw new CollectionTerminatedException();
                 }
                 // the candidate key is not competitive, skip it.
@@ -324,7 +328,7 @@ boolean addIfCompetitive(int indexSortSourcePrefix, long inc) {
         } else {
             newSlot = size();
         }
-        // move the candidate key to its new slot
+        // move the candidate key to its new slot by copying its values to the new slot
         copyCurrent(newSlot, inc);
         map.put(new Slot(newSlot), newSlot);
         add(newSlot);
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java
index 788a4ddc15374..5289b3a34ab34 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeValuesSourceConfig.java
@@ -156,7 +156,7 @@ public MissingOrder missingOrder() {

     /**
      * Returns true if the source contains a script that can change the value.
*/ - protected boolean hasScript() { + public boolean hasScript() { return hasScript; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java index fd94ba355238a..3926ce9bbecb7 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/DateHistogramValuesSourceBuilder.java @@ -298,7 +298,7 @@ public static void register(ValuesSourceRegistry.Builder builder) { // TODO once composite is plugged in to the values source registry or at least understands Date values source types use it // here Rounding.Prepared preparedRounding = rounding.prepareForUnknown(); - RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding); + RoundingValuesSource vs = new RoundingValuesSource(numeric, preparedRounding, rounding); // is specified in the builder. final DocValueFormat docValueFormat = format == null ? DocValueFormat.RAW : valuesSourceConfig.format(); final MappedFieldType fieldType = valuesSourceConfig.fieldType(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java index 9f8a4cff5f3fc..43f1ad32a66f4 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/InternalComposite.java @@ -339,7 +339,7 @@ public static class InternalBucket extends InternalMultiBucketAggregation.Intern KeyComparable<InternalBucket> { private final CompositeKey key; - private final long docCount; + private long docCount; private final InternalAggregations aggregations; private final transient int[] reverseMuls; private final transient MissingOrder[] missingOrders; @@ -436,6 +436,10 @@ public long getDocCount() { return docCount; } + public void setDocCount(long docCount) { + this.docCount = docCount; + } + @Override public Aggregations getAggregations() { return aggregations; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java index 3d6730203b6ae..dc130eb54c0ea 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/PointsSortedDocsProducer.java @@ -68,6 +68,7 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReade // no value for the field return DocIdSet.EMPTY; } + long lowerBucket = Long.MIN_VALUE; Comparable lowerValue = queue.getLowerValueLeadSource(); if (lowerValue != null) { @@ -76,7 +77,6 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReade } lowerBucket = (Long) lowerValue; } - long upperBucket = Long.MAX_VALUE; Comparable upperValue = queue.getUpperValueLeadSource(); if (upperValue != null) { @@ -85,6 +85,7 @@ DocIdSet processLeaf(Query query, CompositeValuesCollectorQueue queue, LeafReade } upperBucket = (Long) upperValue; } + DocIdSetBuilder builder = fillDocIdSet ? 
new DocIdSetBuilder(context.reader().maxDoc(), values, field) : null; Visitor visitor = new Visitor(context, queue, builder, values.getBytesPerDimension(), lowerBucket, upperBucket); try { @@ -146,6 +147,7 @@ public void visit(int docID, byte[] packedValue) throws IOException { } long bucket = bucketFunction.applyAsLong(packedValue); + // process previous bucket when new bucket appears if (first == false && bucket != lastBucket) { final DocIdSet docIdSet = bucketDocsBuilder.build(); if (processBucket(queue, context, docIdSet.iterator(), lastBucket, builder) && @@ -182,13 +184,13 @@ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue return PointValues.Relation.CELL_OUTSIDE_QUERY; } } - if (upperBucket != Long.MAX_VALUE) { long minBucket = bucketFunction.applyAsLong(minPackedValue); if (minBucket > upperBucket) { return PointValues.Relation.CELL_OUTSIDE_QUERY; } } + return PointValues.Relation.CELL_CROSSES_QUERY; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java index 89315724ff9ed..3f5cf919f1755 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/RoundingValuesSource.java @@ -47,17 +47,19 @@ * * @opensearch.internal */ -class RoundingValuesSource extends ValuesSource.Numeric { +public class RoundingValuesSource extends ValuesSource.Numeric { private final ValuesSource.Numeric vs; - private final Rounding.Prepared rounding; + private final Rounding.Prepared preparedRounding; + private final Rounding rounding; /** - * - * @param vs The original values source - * @param rounding How to round the values + * @param vs The original values source + * @param preparedRounding How to round the values + * @param rounding The rounding strategy */ - RoundingValuesSource(Numeric vs, Rounding.Prepared rounding) { + RoundingValuesSource(Numeric vs, Rounding.Prepared preparedRounding, Rounding rounding) { this.vs = vs; + this.preparedRounding = preparedRounding; this.rounding = rounding; } @@ -71,8 +73,16 @@ public boolean isBigInteger() { return false; } + public Rounding.Prepared getPreparedRounding() { + return preparedRounding; + } + + public Rounding getRounding() { + return rounding; + } + public long round(long value) { - return rounding.round(value); + return preparedRounding.round(value); } @Override diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java index a71c15d551927..0ea820abbedf4 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -33,8 +33,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; import org.apache.lucene.util.CollectionUtil; import org.opensearch.common.Rounding; import org.opensearch.common.Rounding.Prepared; @@ -42,7 +42,6 @@ import org.opensearch.common.util.IntArray; import 
org.opensearch.common.util.LongArray; import org.opensearch.core.common.util.ByteArray; -import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.Aggregator; import org.opensearch.search.aggregations.AggregatorFactories; @@ -53,6 +52,7 @@ import org.opensearch.search.aggregations.LeafBucketCollectorBase; import org.opensearch.search.aggregations.bucket.DeferableBucketAggregator; import org.opensearch.search.aggregations.bucket.DeferringBucketCollector; +import org.opensearch.search.aggregations.bucket.FastFilterRewriteHelper; import org.opensearch.search.aggregations.bucket.MergingBucketsDeferringCollector; import org.opensearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; @@ -127,14 +127,14 @@ static AutoDateHistogramAggregator build( * {@link MergingBucketsDeferringCollector#mergeBuckets(long[])}. */ private MergingBucketsDeferringCollector deferringCollector; - private final Weight[] filters; - private final DateFieldMapper.DateFieldType fieldType; protected final RoundingInfo[] roundingInfos; protected final int targetBuckets; protected int roundingIdx; protected Rounding.Prepared preparedRounding; + private final FastFilterRewriteHelper.FastFilterContext fastFilterContext; + private AutoDateHistogramAggregator( String name, AggregatorFactories factories, @@ -156,23 +156,23 @@ private AutoDateHistogramAggregator( this.roundingPreparer = roundingPreparer; this.preparedRounding = prepareRounding(0); - FilterRewriteHelper.FilterContext filterContext = FilterRewriteHelper.buildFastFilterContext( - parent(), - subAggregators.length, - context, - b -> getMinimumRounding(b[0], b[1]), - // Passing prepared rounding as supplier to ensure the correct prepared - // rounding is set as it is done during getMinimumRounding - () -> preparedRounding, - valuesSourceConfig, - fc -> FilterRewriteHelper.getAggregationBounds(context, fc.field()) + fastFilterContext = new FastFilterRewriteHelper.FastFilterContext(); + fastFilterContext.setAggregationType( + new FastFilterRewriteHelper.DateHistogramAggregationType( + valuesSourceConfig.fieldType(), + valuesSourceConfig.missing() != null, + valuesSourceConfig.script() != null + ) ); - if (filterContext != null) { - fieldType = filterContext.fieldType; - filters = filterContext.filters; - } else { - fieldType = null; - filters = null; + if (fastFilterContext.isRewriteable(parent, subAggregators.length)) { + fastFilterContext.buildFastFilter( + context, + fc -> FastFilterRewriteHelper.getAggregationBounds(context, fc.getFieldType().name()), + b -> getMinimumRounding(b[0], b[1]), + // Passing prepared rounding as supplier to ensure the correct prepared + // rounding is set as it is done during getMinimumRounding + () -> preparedRounding + ); } } @@ -226,28 +226,21 @@ public final LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBuc return LeafBucketCollector.NO_OP_COLLECTOR; } + boolean optimized = FastFilterRewriteHelper.tryFastFilterAggregation( + ctx, + fastFilterContext, + (key, count) -> incrementBucketDocCount( + FastFilterRewriteHelper.getBucketOrd(getBucketOrds().add(0, preparedRounding.round(key))), + count + ) + ); + if (optimized) throw new CollectionTerminatedException(); + final SortedNumericDocValues values = valuesSource.longValues(ctx); final LeafBucketCollector iteratingCollector = getLeafCollector(values, sub); - - // Need to be 
declared as final and array for usage within the - // LeafBucketCollectorBase subclass below - final boolean[] useOpt = new boolean[1]; - useOpt[0] = filters != null; - return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { - // Try fast filter aggregation if the filters have been created - // Skip if tried before and gave incorrect/incomplete results - if (useOpt[0]) { - useOpt[0] = FilterRewriteHelper.tryFastFilterAggregation(ctx, filters, fieldType, (key, count) -> { - incrementBucketDocCount( - FilterRewriteHelper.getBucketOrd(getBucketOrds().add(owningBucketOrd, preparedRounding.round(key))), - count - ); - }); - } - iteratingCollector.collect(doc, owningBucketOrd); } }; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index 8437e1dce9fe0..b95bd093b82a6 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -33,13 +33,12 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; import org.apache.lucene.util.CollectionUtil; import org.opensearch.common.Nullable; import org.opensearch.common.Rounding; import org.opensearch.common.lease.Releasables; -import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.Aggregator; import org.opensearch.search.aggregations.AggregatorFactories; @@ -49,8 +48,8 @@ import org.opensearch.search.aggregations.LeafBucketCollector; import org.opensearch.search.aggregations.LeafBucketCollectorBase; import org.opensearch.search.aggregations.bucket.BucketsAggregator; +import org.opensearch.search.aggregations.bucket.FastFilterRewriteHelper; import org.opensearch.search.aggregations.bucket.terms.LongKeyedBucketOrds; -import org.opensearch.search.aggregations.support.FieldContext; import org.opensearch.search.aggregations.support.ValuesSource; import org.opensearch.search.aggregations.support.ValuesSourceConfig; import org.opensearch.search.internal.SearchContext; @@ -81,9 +80,9 @@ class DateHistogramAggregator extends BucketsAggregator implements SizedBucketAg private final long minDocCount; private final LongBounds extendedBounds; private final LongBounds hardBounds; - private final Weight[] filters; private final LongKeyedBucketOrds bucketOrds; - private final DateFieldMapper.DateFieldType fieldType; + + private final FastFilterRewriteHelper.FastFilterContext fastFilterContext; DateHistogramAggregator( String name, @@ -116,26 +115,21 @@ class DateHistogramAggregator extends BucketsAggregator implements SizedBucketAg bucketOrds = LongKeyedBucketOrds.build(context.bigArrays(), cardinality); - FilterRewriteHelper.FilterContext filterContext = FilterRewriteHelper.buildFastFilterContext( - parent, - subAggregators.length, - context, - x -> rounding, - () -> preparedRounding, - valuesSourceConfig, - this::computeBounds + fastFilterContext = new FastFilterRewriteHelper.FastFilterContext(); + fastFilterContext.setAggregationType( + new 
FastFilterRewriteHelper.DateHistogramAggregationType( + valuesSourceConfig.fieldType(), + valuesSourceConfig.missing() != null, + valuesSourceConfig.script() != null + ) ); - if (filterContext != null) { - fieldType = filterContext.fieldType; - filters = filterContext.filters; - } else { - filters = null; - fieldType = null; + if (fastFilterContext.isRewriteable(parent, subAggregators.length)) { + fastFilterContext.buildFastFilter(context, this::computeBounds, x -> rounding, () -> preparedRounding); } } - private long[] computeBounds(final FieldContext fieldContext) throws IOException { - final long[] bounds = FilterRewriteHelper.getAggregationBounds(context, fieldContext.field()); + private long[] computeBounds(final FastFilterRewriteHelper.DateHistogramAggregationType fieldContext) throws IOException { + final long[] bounds = FastFilterRewriteHelper.getAggregationBounds(context, fieldContext.getFieldType().name()); if (bounds != null) { // Update min/max limit if user specified any hard bounds if (hardBounds != null) { @@ -160,26 +154,20 @@ public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCol return LeafBucketCollector.NO_OP_COLLECTOR; } - // Need to be declared as final and array for usage within the - // LeafBucketCollectorBase subclass below - final boolean[] useOpt = new boolean[1]; - useOpt[0] = filters != null; + boolean optimized = FastFilterRewriteHelper.tryFastFilterAggregation( + ctx, + fastFilterContext, + (key, count) -> incrementBucketDocCount( + FastFilterRewriteHelper.getBucketOrd(bucketOrds.add(0, preparedRounding.round(key))), + count + ) + ); + if (optimized) throw new CollectionTerminatedException(); SortedNumericDocValues values = valuesSource.longValues(ctx); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long owningBucketOrd) throws IOException { - // Try fast filter aggregation if the filters have been created - // Skip if tried before and gave incorrect/incomplete results - if (useOpt[0]) { - useOpt[0] = FilterRewriteHelper.tryFastFilterAggregation(ctx, filters, fieldType, (key, count) -> { - incrementBucketDocCount( - FilterRewriteHelper.getBucketOrd(bucketOrds.add(owningBucketOrd, preparedRounding.round(key))), - count - ); - }); - } - if (values.advanceExact(doc)) { int valuesCount = values.docValueCount(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/FilterRewriteHelper.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/FilterRewriteHelper.java deleted file mode 100644 index 29cecd5b382cd..0000000000000 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/histogram/FilterRewriteHelper.java +++ /dev/null @@ -1,259 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. 
- */ - -package org.opensearch.search.aggregations.bucket.histogram; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.PointValues; -import org.apache.lucene.search.CollectionTerminatedException; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.IndexOrDocValuesQuery; -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.PointRangeQuery; -import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Weight; -import org.apache.lucene.util.NumericUtils; -import org.opensearch.common.CheckedFunction; -import org.opensearch.common.Rounding; -import org.opensearch.common.lucene.search.function.FunctionScoreQuery; -import org.opensearch.index.mapper.DateFieldMapper; -import org.opensearch.index.query.DateRangeIncludingNowQuery; -import org.opensearch.search.aggregations.support.FieldContext; -import org.opensearch.search.aggregations.support.ValuesSourceConfig; -import org.opensearch.search.internal.SearchContext; - -import java.io.IOException; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.OptionalLong; -import java.util.function.BiConsumer; -import java.util.function.Function; -import java.util.function.Supplier; - -/** - * Helpers functions to rewrite and optimize aggregations using - * range filter queries - * - * @opensearch.internal - */ -public class FilterRewriteHelper { - - static class FilterContext { - final DateFieldMapper.DateFieldType fieldType; - final Weight[] filters; - - public FilterContext(DateFieldMapper.DateFieldType fieldType, Weight[] filters) { - this.fieldType = fieldType; - this.filters = filters; - } - } - - private static final int MAX_NUM_FILTER_BUCKETS = 1024; - private static final Map<Class<?>, Function<Query, Query>> queryWrappers; - - // Initialize the wrappers map for unwrapping the query - static { - queryWrappers = new HashMap<>(); - queryWrappers.put(ConstantScoreQuery.class, q -> ((ConstantScoreQuery) q).getQuery()); - queryWrappers.put(FunctionScoreQuery.class, q -> ((FunctionScoreQuery) q).getSubQuery()); - queryWrappers.put(DateRangeIncludingNowQuery.class, q -> ((DateRangeIncludingNowQuery) q).getQuery()); - queryWrappers.put(IndexOrDocValuesQuery.class, q -> ((IndexOrDocValuesQuery) q).getIndexQuery()); - } - - /** - * Recursively unwraps query into the concrete form - * for applying the optimization - */ - private static Query unwrapIntoConcreteQuery(Query query) { - while (queryWrappers.containsKey(query.getClass())) { - query = queryWrappers.get(query.getClass()).apply(query); - } - - return query; - } - - /** - * Finds the min and max bounds for segments within the passed search context - */ - private static long[] getIndexBoundsFromLeaves(final SearchContext context, final String fieldName) throws IOException { - final List<LeafReaderContext> leaves = context.searcher().getIndexReader().leaves(); - long min = Long.MAX_VALUE, max = Long.MIN_VALUE; - // Since the query does not specify bounds for aggregation, we can - // build the global min/max from local min/max within each segment - for (LeafReaderContext leaf : leaves) { - final PointValues values = leaf.reader().getPointValues(fieldName); - if (values != null) { - min = Math.min(min, NumericUtils.sortableBytesToLong(values.getMinPackedValue(), 0)); - max = Math.max(max, NumericUtils.sortableBytesToLong(values.getMaxPackedValue(), 0)); - } - } - - if (min == Long.MAX_VALUE || max == 
Long.MIN_VALUE) return null; - - return new long[] { min, max }; - } - - static long[] getAggregationBounds(final SearchContext context, final String fieldName) throws IOException { - final Query cq = unwrapIntoConcreteQuery(context.query()); - final long[] indexBounds = getIndexBoundsFromLeaves(context, fieldName); - if (cq instanceof PointRangeQuery) { - final PointRangeQuery prq = (PointRangeQuery) cq; - // Ensure that the query and aggregation are on the same field - if (prq.getField().equals(fieldName)) { - return new long[] { - // Minimum bound for aggregation is the max between query and global - Math.max(NumericUtils.sortableBytesToLong(prq.getLowerPoint(), 0), indexBounds[0]), - // Maximum bound for aggregation is the min between query and global - Math.min(NumericUtils.sortableBytesToLong(prq.getUpperPoint(), 0), indexBounds[1]) }; - } - } else if (cq instanceof MatchAllDocsQuery) { - return indexBounds; - } - - return null; - } - - /** - * Creates the range query filters for aggregations using the interval, min/max - * bounds and the rounding values - */ - private static Weight[] createFilterForAggregations( - final SearchContext context, - final Rounding rounding, - final Rounding.Prepared preparedRounding, - final String field, - final DateFieldMapper.DateFieldType fieldType, - final long low, - final long high - ) throws IOException { - final OptionalLong intervalOpt = Rounding.getInterval(rounding); - if (intervalOpt.isEmpty()) { - return null; - } - - final long interval = intervalOpt.getAsLong(); - // Calculate the number of buckets using range and interval - long roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); - long prevRounded = roundedLow; - int bucketCount = 0; - while (roundedLow <= fieldType.convertNanosToMillis(high)) { - bucketCount++; - // Below rounding is needed as the interval could return in - // non-rounded values for something like calendar month - roundedLow = preparedRounding.round(roundedLow + interval); - if (prevRounded == roundedLow) break; - prevRounded = roundedLow; - } - - Weight[] filters = null; - if (bucketCount > 0 && bucketCount <= MAX_NUM_FILTER_BUCKETS) { - int i = 0; - filters = new Weight[bucketCount]; - roundedLow = preparedRounding.round(fieldType.convertNanosToMillis(low)); - while (i < bucketCount) { - // Calculate the lower bucket bound - final byte[] lower = new byte[8]; - NumericUtils.longToSortableBytes(i == 0 ? low : fieldType.convertRoundedMillisToNanos(roundedLow), lower, 0); - // Calculate the upper bucket bound - final byte[] upper = new byte[8]; - roundedLow = preparedRounding.round(roundedLow + interval); - // Subtract -1 if the minimum is roundedLow as roundedLow itself - // is included in the next bucket - NumericUtils.longToSortableBytes( - i + 1 == bucketCount ? 
high : fieldType.convertRoundedMillisToNanos(roundedLow) - 1, - upper, - 0 - ); - filters[i++] = context.searcher().createWeight(new PointRangeQuery(field, lower, upper, 1) { - @Override - protected String toString(int dimension, byte[] value) { - return null; - } - }, ScoreMode.COMPLETE_NO_SCORES, 1); - } - } - - return filters; - } - - static FilterContext buildFastFilterContext( - final Object parent, - final int subAggLength, - SearchContext context, - Function<long[], Rounding> roundingFunction, - Supplier<Rounding.Prepared> preparedRoundingSupplier, - ValuesSourceConfig valuesSourceConfig, - CheckedFunction<FieldContext, long[], IOException> computeBounds - ) throws IOException { - // Create the filters for fast aggregation only if the query is instance - // of point range query and there aren't any parent/sub aggregations - if (parent == null && subAggLength == 0 && valuesSourceConfig.missing() == null && valuesSourceConfig.script() == null) { - final FieldContext fieldContext = valuesSourceConfig.fieldContext(); - if (fieldContext != null) { - final String fieldName = fieldContext.field(); - final long[] bounds = computeBounds.apply(fieldContext); - if (bounds != null) { - assert fieldContext.fieldType() instanceof DateFieldMapper.DateFieldType; - final DateFieldMapper.DateFieldType fieldType = (DateFieldMapper.DateFieldType) fieldContext.fieldType(); - final Rounding rounding = roundingFunction.apply(bounds); - final Weight[] filters = FilterRewriteHelper.createFilterForAggregations( - context, - rounding, - preparedRoundingSupplier.get(), - fieldName, - fieldType, - bounds[0], - bounds[1] - ); - return new FilterContext(fieldType, filters); - } - } - } - return null; - } - - static long getBucketOrd(long bucketOrd) { - if (bucketOrd < 0) { // already seen - bucketOrd = -1 - bucketOrd; - } - - return bucketOrd; - } - - static boolean tryFastFilterAggregation( - final LeafReaderContext ctx, - final Weight[] filters, - final DateFieldMapper.DateFieldType fieldType, - final BiConsumer<Long, Integer> incrementDocCount - ) throws IOException { - final int[] counts = new int[filters.length]; - int i; - for (i = 0; i < filters.length; i++) { - counts[i] = filters[i].count(ctx); - if (counts[i] == -1) { - // Cannot use the optimization if any of the counts - // is -1 indicating the segment might have deleted documents - return false; - } - } - - for (i = 0; i < filters.length; i++) { - if (counts[i] > 0) { - incrementDocCount.accept( - fieldType.convertNanosToMillis( - NumericUtils.sortableBytesToLong(((PointRangeQuery) filters[i].getQuery()).getLowerPoint(), 0) - ), - counts[i] - ); - } - } - throw new CollectionTerminatedException(); - } -} diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index eabc4b7764eed..b581e552fec4f 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -1279,7 +1279,7 @@ public void testWithDateHistogram() throws IOException { }, (result) -> { assertEquals(3, result.getBuckets().size()); - assertEquals("{date=1508457600000}", result.afterKey().toString()); + assertEquals("{date=1508457600000}", result.afterKey().toString()); // 2017-10-20T00:00:00 assertEquals("{date=1474329600000}", 
result.getBuckets().get(0).getKeyAsString()); assertEquals(2L, result.getBuckets().get(0).getDocCount()); assertEquals("{date=1508371200000}", result.getBuckets().get(1).getKeyAsString()); @@ -1300,9 +1300,8 @@ public void testWithDateHistogram() throws IOException { DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date").field("date") .calendarInterval(DateHistogramInterval.days(1)); return new CompositeAggregationBuilder("name", Collections.singletonList(histo)).aggregateAfter( - createAfterKey("date", 1474329600000L) + createAfterKey("date", 1474329600000L) // 2016-09-20T00:00:00 ); - }, (result) -> { assertEquals(2, result.getBuckets().size()); @@ -2242,21 +2241,20 @@ private <T extends Comparable<T>, V extends Comparable<T>> void testRandomTerms( Function<Object, V> transformKey ) throws IOException { int numTerms = randomIntBetween(10, 500); - List<T> terms = new ArrayList<>(); + List<T> terms = new ArrayList<>(); // possible values for the terms for (int i = 0; i < numTerms; i++) { terms.add(randomSupplier.get()); } int numDocs = randomIntBetween(100, 200); List<Map<String, List<Object>>> dataset = new ArrayList<>(); - - Set<T> valuesSet = new HashSet<>(); - Map<Comparable<?>, AtomicLong> expectedDocCounts = new HashMap<>(); + Set<T> valuesSet = new HashSet<>(); // how many different values + Map<Comparable<?>, AtomicLong> expectedDocCounts = new HashMap<>(); // how many docs for each value for (int i = 0; i < numDocs; i++) { int numValues = randomIntBetween(1, 5); Set<Object> values = new HashSet<>(); for (int j = 0; j < numValues; j++) { int rand = randomIntBetween(0, terms.size() - 1); - if (values.add(terms.get(rand))) { + if (values.add(terms.get(rand))) { // values are unique for one doc AtomicLong count = expectedDocCounts.computeIfAbsent(terms.get(rand), (k) -> new AtomicLong(0)); count.incrementAndGet(); valuesSet.add(terms.get(rand)); @@ -2264,9 +2262,8 @@ private <T extends Comparable<T>, V extends Comparable<T>> void testRandomTerms( } dataset.add(Collections.singletonMap(field, new ArrayList<>(values))); } - List<T> expected = new ArrayList<>(valuesSet); + List<T> expected = new ArrayList<>(valuesSet); // how many buckets expected Collections.sort(expected); - List<Comparable<T>> seen = new ArrayList<>(); AtomicBoolean finish = new AtomicBoolean(false); int size = randomIntBetween(1, expected.size()); From 309413f9078d8900e2779242a61febeba610ae4d Mon Sep 17 00:00:00 2001 From: Siddhant Deshmukh <deshsid@amazon.com> Date: Wed, 17 Jan 2024 16:07:27 -0800 Subject: [PATCH 098/178] Bump com.google.api:gax-httpjson from 0.103.1 to 2.39.0 in /plugins/repository-gcs (#11912) --------- Signed-off-by: dependabot[bot] <support@github.com> Signed-off-by: Siddhant Deshmukh <deshsid@amazon.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + plugins/repository-gcs/build.gradle | 2 +- plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 | 1 - plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index ea96035807276..9ff8dcf623681 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -172,6 +172,7 @@ The format is based on [Keep a 
Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887)) - Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) - Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.1.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886)) +- Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 1bef5146f1db9..af6bf38cb065c 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -54,7 +54,7 @@ versions << [ dependencies { api 'com.google.api:api-common:1.8.1' api 'com.google.api:gax:2.35.0' - api 'com.google.api:gax-httpjson:0.103.1' + api 'com.google.api:gax-httpjson:2.39.0' api 'com.google.apis:google-api-services-storage:v1-rev20230617-2.0.0' diff --git a/plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 deleted file mode 100644 index 11315004e233d..0000000000000 --- a/plugins/repository-gcs/licenses/gax-httpjson-0.103.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -041d99172fda933bc879bdfd8de9420c5c34107e \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 new file mode 100644 index 0000000000000..d0f3b8dfa6d25 --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 @@ -0,0 +1 @@ +ebff086f21c54c1eb02056f94255b4f1836a8efe \ No newline at end of file From 904c9a99987c5bb2b2db02a9e2c911af51f23616 Mon Sep 17 00:00:00 2001 From: Ashish <ssashish@amazon.com> Date: Thu, 18 Jan 2024 10:06:20 +0530 Subject: [PATCH 099/178] Fix flaky RemoteFSTranslogTests testMetadataFileDeletion test #9605 (#11899) Signed-off-by: Ashish Singh <ssashish@amazon.com> --- .../index/translog/RemoteFsTranslogTests.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java b/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java index 6bfab278993ed..a83e737dc25c1 100644 --- a/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/RemoteFsTranslogTests.java @@ -86,7 +86,6 @@ import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; -import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -715,6 +714,7 @@ public void testSimpleOperationsUpload() throws Exception { translog.setMinSeqNoToKeep(0); // This should not trim anything from local translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(2, translog.readers.size()); assertBusy(() -> { assertEquals(4, translog.allUploaded().size()); @@ -728,6 +728,7 @@ public void testSimpleOperationsUpload() throws Exception 
{ // This should not trim tlog-2.* files from remote as we not uploading any more translog to remote translog.setMinSeqNoToKeep(1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); assertBusy(() -> { assertEquals(4, translog.allUploaded().size()); @@ -766,6 +767,7 @@ public void testMetadataFileDeletion() throws Exception { addToTranslogAndListAndUpload(translog, ops, new Translog.Index(String.valueOf(i), i, primaryTerm.get(), new byte[] { 1 })); translog.setMinSeqNoToKeep(i); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); } assertBusy(() -> assertEquals(4, translog.allUploaded().size())); @@ -776,6 +778,7 @@ public void testMetadataFileDeletion() throws Exception { addToTranslogAndListAndUpload(translog, ops, new Translog.Index(String.valueOf(i), i, primaryTerm.get(), new byte[] { 1 })); } translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1 + moreDocs, translog.readers.size()); assertBusy(() -> assertEquals(2 + 2L * moreDocs, translog.allUploaded().size())); assertBusy(() -> assertEquals(1, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR)).size())); @@ -783,6 +786,7 @@ public void testMetadataFileDeletion() throws Exception { int totalDocs = numDocs + moreDocs; translog.setMinSeqNoToKeep(totalDocs - 1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); addToTranslogAndListAndUpload( translog, @@ -791,6 +795,7 @@ public void testMetadataFileDeletion() throws Exception { ); translog.setMinSeqNoToKeep(totalDocs); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertBusy(() -> assertEquals(1, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR)).size())); // Change primary term and test the deletion of older primaries @@ -841,6 +846,7 @@ public void testDrainSync() throws Exception { translog.setMinSeqNoToKeep(0); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); // Case 1 - During ongoing uploads, the available permits are 0. @@ -869,6 +875,7 @@ public void testDrainSync() throws Exception { // Case 3 - After drainSync, if trimUnreferencedReaders is attempted, we do not delete from remote store. 
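The assertBusy calls threaded through these tests exist because remote generation deletion releases its permits asynchronously, so asserting once would race the cleanup thread. A minimal sketch of the polling idiom, assuming a simplified helper rather than the actual OpenSearch test utility (names are illustrative):

import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;

class AssertBusySketch {
    // Polls the assertion until it passes or the timeout expires, rethrowing the
    // last failure. This mirrors the idea behind the assertBusy(...) calls above.
    static void assertBusy(Runnable assertion, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (true) {
            try {
                assertion.run();
                return;
            } catch (AssertionError e) {
                if (System.currentTimeMillis() >= deadline) throw e;
                TimeUnit.MILLISECONDS.sleep(10); // retry until the deadline
            }
        }
    }

    public static void main(String[] args) throws InterruptedException {
        Semaphore permits = new Semaphore(0);
        new Thread(permits::release).start(); // an async cleanup releasing its permit
        assertBusy(() -> {
            if (permits.availablePermits() != 1) throw new AssertionError("permit not released yet");
        }, 1000);
        System.out.println("permit released");
    }
}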
translog.setMinSeqNoToKeep(1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); assertEquals(6, translog.allUploaded().size()); assertEquals(mdFiles, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR))); @@ -892,6 +899,7 @@ public void testDrainSync() throws Exception { translog.setMinSeqNoToKeep(3); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); assertEquals(1, translog.readers.size()); assertBusy(() -> assertEquals(4, translog.allUploaded().size())); assertBusy(() -> assertEquals(1, blobStoreTransferService.listAll(getTranslogDirectory().add(METADATA_DIR)).size())); @@ -1048,7 +1056,7 @@ public void testConcurrentWriteViewsAndSnapshot() throws Throwable { final int threadId = i; writers[i] = new Thread(new AbstractRunnable() { @Override - public void doRun() throws BrokenBarrierException, InterruptedException, IOException { + public void doRun() throws Exception { barrier.await(); int counter = 0; while (run.get() && idGenerator.get() < maxOps) { @@ -1090,6 +1098,7 @@ public void doRun() throws BrokenBarrierException, InterruptedException, IOExcep // deletionPolicy.setLocalCheckpointOfSafeCommit(localCheckpoint); translog.setMinSeqNoToKeep(localCheckpoint + 1); translog.trimUnreferencedReaders(); + assertBusy(() -> assertTrue(translog.isRemoteGenerationDeletionPermitsAvailable())); } } if (id % 7 == 0) { From b424aafdffb09cbc56cbc436bb1f4073b2f9f079 Mon Sep 17 00:00:00 2001 From: Ketan Verma <9292653+ketanv3@users.noreply.github.com> Date: Thu, 18 Jan 2024 23:01:26 +0530 Subject: [PATCH 100/178] Add experimental SIMD implementation of B-tree to round down dates (#11194) * Add experimental SIMD implementation of B-tree to round down dates Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Use system properties in favor of feature flags to remove dependency on the server module Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Removed Java 20 test sources to simplify builds Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Remove the use of forbidden APIs in unit-tests Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Migrate to the recommended usage for custom test task classpath Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Switch benchmarks module to multi-release one Signed-off-by: Andriy Redko <andriy.redko@aiven.io> * Add JMH annotation processing for JDK-20+ sources Signed-off-by: Andriy Redko <andriy.redko@aiven.io> * Make JMH annotations consistent across sources Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Improve execution of Roundable unit-tests Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Revert "Improve execution of Roundable unit-tests" This reverts commit 2e82d0aa7b114000bcf58f9485377eb5705ba3b1. 
Signed-off-by: Ketan Verma <ketan9495@gmail.com> * Add 'forced' as a possible feature flag value to simplify the execution of unit-tests Signed-off-by: Ketan Verma <ketan9495@gmail.com> --------- Signed-off-by: Ketan Verma <ketan9495@gmail.com> Signed-off-by: Andriy Redko <andriy.redko@aiven.io> Co-authored-by: Andriy Redko <andriy.redko@aiven.io> Co-authored-by: Andrew Ross <andrross@amazon.com> --- CHANGELOG.md | 1 + benchmarks/build.gradle | 42 ++++++++ .../common/round/RoundableBenchmark.java | 18 +--- .../common/round/RoundableSupplier.java | 35 ++++++ .../common/round/RoundableSupplier.java | 36 +++++++ libs/common/build.gradle | 61 +++++++++++ .../common/round/BtreeSearcher.java | 100 ++++++++++++++++++ .../common/round/RoundableFactory.java | 57 ++++++++++ .../common/round/RoundableTests.java | 54 ++++++---- 9 files changed, 370 insertions(+), 34 deletions(-) create mode 100644 benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java create mode 100644 benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java create mode 100644 libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java create mode 100644 libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ff8dcf623681..b509a5963feb9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -118,6 +118,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Request level coordinator slow logs ([#10650](https://github.com/opensearch-project/OpenSearch/pull/10650)) - Add template snippets support for field and target_field in KV ingest processor ([#10040](https://github.com/opensearch-project/OpenSearch/pull/10040)) - Allowing pipeline processors to access index mapping info by passing ingest service ref as part of the processor factory parameters ([#10307](https://github.com/opensearch-project/OpenSearch/pull/10307)) +- Add experimental SIMD implementation of B-tree to round down dates ([#11194](https://github.com/opensearch-project/OpenSearch/issues/11194)) - Make number of segment metadata files in remote segment store configurable ([#11329](https://github.com/opensearch-project/OpenSearch/pull/11329)) - Allow changing number of replicas of searchable snapshot index ([#11317](https://github.com/opensearch-project/OpenSearch/pull/11317)) - Adding slf4j license header to LoggerMessageFormat.java ([#11069](https://github.com/opensearch-project/OpenSearch/pull/11069)) diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index 6b4634c7e791c..be4579b4e5324 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -84,3 +84,45 @@ spotless { targetExclude 'src/main/generated/**/*.java' } } + +if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) { + // Add support for incubator modules on supported Java versions. 
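The jdk.incubator.vector module wired in below exposes the Panama Vector API that the new BtreeSearcher builds on. A rough sketch of the kind of SIMD comparison involved, using a vectorized linear scan over sorted values (not the B-tree layout the patch actually implements; names are illustrative):

// compile/run with: --add-modules jdk.incubator.vector
import jdk.incubator.vector.LongVector;
import jdk.incubator.vector.VectorOperators;
import jdk.incubator.vector.VectorSpecies;

class VectorFloorSketch {
    private static final VectorSpecies<Long> SPECIES = LongVector.SPECIES_PREFERRED;

    // Returns the greatest element of the sorted array that is <= key,
    // counting matches a whole vector lane-width at a time.
    static long roundDown(long[] sorted, long key) {
        int count = 0, i = 0;
        for (; i <= sorted.length - SPECIES.length(); i += SPECIES.length()) {
            LongVector v = LongVector.fromArray(SPECIES, sorted, i);
            count += v.compare(VectorOperators.LE, key).trueCount();
        }
        for (; i < sorted.length; i++) { // scalar tail
            if (sorted[i] <= key) count++;
        }
        return sorted[count - 1]; // assumes key >= sorted[0]
    }

    public static void main(String[] args) {
        long[] buckets = { 0, 10, 20, 30, 40, 50, 60, 70 };
        System.out.println(roundDown(buckets, 37)); // 30
    }
}

The B-tree layout in the actual BtreeSearcher should reduce this to a logarithmic number of vector comparisons per lookup.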
+ run.jvmArgs += ['--add-modules=jdk.incubator.vector'] + run.classpath += files(jar.archiveFile) + run.classpath -= sourceSets.main.output + evaluationDependsOn(':libs:opensearch-common') + + sourceSets { + java20 { + java { + srcDirs = ['src/main/java20'] + } + } + } + + configurations { + java20Implementation.extendsFrom(implementation) + } + + dependencies { + java20Implementation sourceSets.main.output + java20Implementation project(':libs:opensearch-common').sourceSets.java20.output + java20AnnotationProcessor "org.openjdk.jmh:jmh-generator-annprocess:$versions.jmh" + } + + compileJava20Java { + targetCompatibility = JavaVersion.VERSION_20 + options.compilerArgs.addAll(["-processor", "org.openjdk.jmh.generators.BenchmarkProcessor"]) + } + + jar { + metaInf { + into 'versions/20' + from sourceSets.java20.output + } + manifest.attributes('Multi-Release': 'true') + } + + // classes generated by JMH can use all sorts of forbidden APIs but we have no influence at all and cannot exclude these classes + disableTasks('forbiddenApisJava20') +} diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java index 4e07af452968b..3909a3f4eb8fc 100644 --- a/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java +++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableBenchmark.java @@ -21,7 +21,6 @@ import org.openjdk.jmh.infra.Blackhole; import java.util.Random; -import java.util.function.Supplier; @Fork(value = 3) @Warmup(iterations = 3, time = 1) @@ -83,17 +82,17 @@ public static class Options { "256" }) public Integer size; - @Param({ "binary", "linear" }) + @Param({ "binary", "linear", "btree" }) public String type; @Param({ "uniform", "skewed_edge", "skewed_center" }) public String distribution; public long[] queries; - public Supplier<Roundable> supplier; + public RoundableSupplier supplier; @Setup - public void setup() { + public void setup() throws ClassNotFoundException { Random random = new Random(size); long[] values = new long[size]; for (int i = 1; i < values.length; i++) { @@ -128,16 +127,7 @@ public void setup() { throw new IllegalArgumentException("invalid distribution: " + distribution); } - switch (type) { - case "binary": - supplier = () -> new BinarySearcher(values, size); - break; - case "linear": - supplier = () -> new BidirectionalLinearSearcher(values, size); - break; - default: - throw new IllegalArgumentException("invalid type: " + type); - } + supplier = new RoundableSupplier(type, values, size); } private static long nextPositiveLong(Random random) { diff --git a/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java new file mode 100644 index 0000000000000..44ac42810996f --- /dev/null +++ b/benchmarks/src/main/java/org/opensearch/common/round/RoundableSupplier.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.round; + +import java.util.function.Supplier; + +public class RoundableSupplier implements Supplier<Roundable> { + private final Supplier<Roundable> delegate; + + RoundableSupplier(String type, long[] values, int size) throws ClassNotFoundException { + switch (type) { + case "binary": + delegate = () -> new BinarySearcher(values, size); + break; + case "linear": + delegate = () -> new BidirectionalLinearSearcher(values, size); + break; + case "btree": + throw new ClassNotFoundException("BtreeSearcher is not supported below JDK 20"); + default: + throw new IllegalArgumentException("invalid type: " + type); + } + } + + @Override + public Roundable get() { + return delegate.get(); + } +} diff --git a/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java new file mode 100644 index 0000000000000..e81c1b137bd30 --- /dev/null +++ b/benchmarks/src/main/java20/org/opensearch/common/round/RoundableSupplier.java @@ -0,0 +1,36 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.round; + +import java.util.function.Supplier; + +public class RoundableSupplier implements Supplier<Roundable> { + private final Supplier<Roundable> delegate; + + RoundableSupplier(String type, long[] values, int size) { + switch (type) { + case "binary": + delegate = () -> new BinarySearcher(values, size); + break; + case "linear": + delegate = () -> new BidirectionalLinearSearcher(values, size); + break; + case "btree": + delegate = () -> new BtreeSearcher(values, size); + break; + default: + throw new IllegalArgumentException("invalid type: " + type); + } + } + + @Override + public Roundable get() { + return delegate.get(); + } +} diff --git a/libs/common/build.gradle b/libs/common/build.gradle index 4f89b81636420..60bf488833393 100644 --- a/libs/common/build.gradle +++ b/libs/common/build.gradle @@ -43,3 +43,64 @@ tasks.named('forbiddenApisMain').configure { // TODO: Need to decide how we want to handle for forbidden signatures with the changes to server replaceSignatureFiles 'jdk-signatures' } + +// Add support for incubator modules on supported Java versions. +if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_20) { + sourceSets { + java20 { + java { + srcDirs = ['src/main/java20'] + } + } + } + + configurations { + java20Implementation.extendsFrom(implementation) + } + + dependencies { + java20Implementation sourceSets.main.output + } + + compileJava20Java { + targetCompatibility = JavaVersion.VERSION_20 + options.compilerArgs += ['--add-modules', 'jdk.incubator.vector'] + options.compilerArgs -= '-Werror' // use of incubator modules is reported as a warning + } + + jar { + metaInf { + into 'versions/20' + from sourceSets.java20.output + } + manifest.attributes('Multi-Release': 'true') + } + + tasks.withType(Test).configureEach { + // Relying on the convention for Test.classpath in custom Test tasks has been deprecated + // and scheduled to be removed in Gradle 9.0. 
Below lines are added from the migration guide:
+    // https://docs.gradle.org/8.5/userguide/upgrading_version_8.html#test_task_default_classpath
+    testClassesDirs = testing.suites.test.sources.output.classesDirs
+    classpath = testing.suites.test.sources.runtimeClasspath
+
+    // Adds the multi-release JAR to the classpath when executing tests.
+    // This allows newer sources to be picked up at test runtime (if supported).
+    classpath += files(jar.archiveFile)
+    // Removes the "main" sources from the classpath to avoid JarHell problems as
+    // the multi-release JAR already contains those classes.
+    classpath -= sourceSets.main.output
+  }
+
+  tasks.register('roundableSimdTest', Test) {
+    group 'verification'
+    include '**/RoundableTests.class'
+    systemProperty 'opensearch.experimental.feature.simd.rounding.enabled', 'forced'
+  }
+
+  check.dependsOn(roundableSimdTest)
+
+  forbiddenApisJava20 {
+    failOnMissingClasses = false
+    ignoreSignaturesOfMissingClasses = true
+  }
+}
diff --git a/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java
new file mode 100644
index 0000000000000..626fb6e6b810e
--- /dev/null
+++ b/libs/common/src/main/java20/org/opensearch/common/round/BtreeSearcher.java
@@ -0,0 +1,100 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.common.round;
+
+import org.opensearch.common.annotation.InternalApi;
+
+import jdk.incubator.vector.LongVector;
+import jdk.incubator.vector.Vector;
+import jdk.incubator.vector.VectorOperators;
+import jdk.incubator.vector.VectorSpecies;
+
+/**
+ * Uses vectorized B-tree search to find the round-down point.
+ *
+ * @opensearch.internal
+ */
+@InternalApi
+class BtreeSearcher implements Roundable {
+    private static final VectorSpecies<Long> LONG_VECTOR_SPECIES = LongVector.SPECIES_PREFERRED;
+    private static final int LANES = LONG_VECTOR_SPECIES.length();
+    private static final int SHIFT = log2(LANES);
+
+    private final long[] values;
+    private final long minValue;
+
+    BtreeSearcher(long[] values, int size) {
+        if (size <= 0) {
+            throw new IllegalArgumentException("at least one value must be present");
+        }
+
+        int blocks = (size + LANES - 1) / LANES; // number of blocks
+        int length = 1 + blocks * LANES; // size of the backing array (1-indexed)
+
+        this.minValue = values[0];
+        this.values = new long[length];
+        build(values, 0, size, this.values, 1);
+    }
+
+    /**
+     * Builds the B-tree memory layout.
+     * It builds the tree recursively, following an in-order traversal.
+     *
+     * <p>
+     * Each block stores 'lanes' values at indices {@code i, i + 1, ..., i + lanes - 1} where {@code i} is the
+     * starting offset. The starting offset of the root block is 1. The branching factor is (1 + lanes) so each
+     * block can have that many children. Given the starting offset {@code i} of a block, the starting offset
+     * of its k-th child (with {@code k} ranging over {@code 0, 1, ..., lanes}) can be computed as {@code i + ((i + k) << shift)}.
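+     * For example, with {@code lanes = 8} (so {@code shift = 3}), the root block occupies offsets 1..8 and its
+     * children ({@code k = 0, 1, ..., 8}) start at offsets {@code 1 + ((1 + k) << 3)}, i.e. 9, 17, ..., 73.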
+ * + * @param src is the sorted input array + * @param i is the index in the input array to read the value from + * @param size the number of values in the input array + * @param dst is the output array + * @param j is the index in the output array to write the value to + * @return the next index 'i' + */ + private static int build(long[] src, int i, int size, long[] dst, int j) { + if (j < dst.length) { + for (int k = 0; k < LANES; k++) { + i = build(src, i, size, dst, j + ((j + k) << SHIFT)); + + // Fills the B-tree as a complete tree, i.e., all levels are completely filled, + // except the last level which is filled from left to right. + // The trick is to fill the destination array between indices 1...size (inclusive / 1-indexed) + // and pad the remaining array with +infinity. + dst[j + k] = (j + k <= size) ? src[i++] : Long.MAX_VALUE; + } + i = build(src, i, size, dst, j + ((j + LANES) << SHIFT)); + } + return i; + } + + @Override + public long floor(long key) { + Vector<Long> keyVector = LongVector.broadcast(LONG_VECTOR_SPECIES, key); + int i = 1, result = 1; + + while (i < values.length) { + Vector<Long> valuesVector = LongVector.fromArray(LONG_VECTOR_SPECIES, values, i); + int j = i + valuesVector.compare(VectorOperators.GT, keyVector).firstTrue(); + result = (j > i) ? j : result; + i += (j << SHIFT); + } + + assert result > 1 : "key must be greater than or equal to " + minValue; + return values[result - 1]; + } + + private static int log2(int num) { + if ((num <= 0) || ((num & (num - 1)) != 0)) { + throw new IllegalArgumentException(num + " is not a positive power of 2"); + } + return 32 - Integer.numberOfLeadingZeros(num - 1); + } +} diff --git a/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java new file mode 100644 index 0000000000000..c5ec45bceb5bd --- /dev/null +++ b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java @@ -0,0 +1,57 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.common.round; + +import org.opensearch.common.annotation.InternalApi; + +import jdk.incubator.vector.LongVector; + +/** + * Factory class to create and return the fastest implementation of {@link Roundable}. + * + * @opensearch.internal + */ +@InternalApi +public final class RoundableFactory { + /** + * The maximum limit up to which linear search is used, otherwise binary or B-tree search is used. + * This is because linear search is much faster on small arrays. + * Benchmark results: <a href="https://github.com/opensearch-project/OpenSearch/pull/9727">PR #9727</a> + */ + private static final int LINEAR_SEARCH_MAX_SIZE = 64; + + /** + * Indicates whether the vectorized (SIMD) B-tree search implementation is to be used. + * It is true when either: + * 1. The feature flag is set to "forced", or + * 2. The platform has a minimum of 4 long vector lanes and the feature flag is set to "true". 
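+     * For example, running the JVM with {@code -Dopensearch.experimental.feature.simd.rounding.enabled=true}
+     * enables it on platforms with at least 4 long lanes, i.e. 256-bit or wider vector registers.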
+ */ + private static final boolean USE_BTREE_SEARCHER; + + static { + String simdRoundingFeatureFlag = System.getProperty("opensearch.experimental.feature.simd.rounding.enabled"); + USE_BTREE_SEARCHER = "forced".equalsIgnoreCase(simdRoundingFeatureFlag) + || (LongVector.SPECIES_PREFERRED.length() >= 4 && "true".equalsIgnoreCase(simdRoundingFeatureFlag)); + } + + private RoundableFactory() {} + + /** + * Creates and returns the fastest implementation of {@link Roundable}. + */ + public static Roundable create(long[] values, int size) { + if (size <= LINEAR_SEARCH_MAX_SIZE) { + return new BidirectionalLinearSearcher(values, size); + } else if (USE_BTREE_SEARCHER) { + return new BtreeSearcher(values, size); + } else { + return new BinarySearcher(values, size); + } + } +} diff --git a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java index ae9f629c59024..ad19f456b0df4 100644 --- a/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java +++ b/libs/common/src/test/java/org/opensearch/common/round/RoundableTests.java @@ -12,15 +12,31 @@ public class RoundableTests extends OpenSearchTestCase { - public void testFloor() { - int size = randomIntBetween(1, 256); - long[] values = new long[size]; - for (int i = 1; i < values.length; i++) { - values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1; - } + public void testRoundingEmptyArray() { + Throwable throwable = assertThrows(IllegalArgumentException.class, () -> RoundableFactory.create(new long[0], 0)); + assertEquals("at least one value must be present", throwable.getMessage()); + } + + public void testRoundingSmallArray() { + int size = randomIntBetween(1, 64); + long[] values = randomArrayOfSortedValues(size); + Roundable roundable = RoundableFactory.create(values, size); + + assertEquals("BidirectionalLinearSearcher", roundable.getClass().getSimpleName()); + assertRounding(roundable, values, size); + } - Roundable[] impls = { new BinarySearcher(values, size), new BidirectionalLinearSearcher(values, size) }; + public void testRoundingLargeArray() { + int size = randomIntBetween(65, 256); + long[] values = randomArrayOfSortedValues(size); + Roundable roundable = RoundableFactory.create(values, size); + boolean useBtreeSearcher = "forced".equalsIgnoreCase(System.getProperty("opensearch.experimental.feature.simd.rounding.enabled")); + assertEquals(useBtreeSearcher ? "BtreeSearcher" : "BinarySearcher", roundable.getClass().getSimpleName()); + assertRounding(roundable, values, size); + } + + private void assertRounding(Roundable roundable, long[] values, int size) { for (int i = 0; i < 100000; i++) { // Index of the expected round-down point. int idx = randomIntBetween(0, size - 1); @@ -35,23 +51,21 @@ public void testFloor() { // round-down point, which will still floor to the same value. long key = expected + (randomNonNegativeLong() % delta); - for (Roundable roundable : impls) { - assertEquals(expected, roundable.floor(key)); - } + assertEquals(expected, roundable.floor(key)); } + + Throwable throwable = assertThrows(AssertionError.class, () -> roundable.floor(values[0] - 1)); + assertEquals("key must be greater than or equal to " + values[0], throwable.getMessage()); } - public void testFailureCases() { - Throwable throwable; + private static long[] randomArrayOfSortedValues(int size) { + int capacity = size + randomInt(20); // May be slightly more than the size. 
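+        // The extra capacity verifies that implementations honor 'size' rather than reading values.length.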
+ long[] values = new long[capacity]; - throwable = assertThrows(IllegalArgumentException.class, () -> new BinarySearcher(new long[0], 0)); - assertEquals("at least one value must be present", throwable.getMessage()); - throwable = assertThrows(IllegalArgumentException.class, () -> new BidirectionalLinearSearcher(new long[0], 0)); - assertEquals("at least one value must be present", throwable.getMessage()); + for (int i = 1; i < size; i++) { + values[i] = values[i - 1] + (randomNonNegativeLong() % 200) + 1; + } - throwable = assertThrows(AssertionError.class, () -> new BinarySearcher(new long[] { 100 }, 1).floor(50)); - assertEquals("key must be greater than or equal to 100", throwable.getMessage()); - throwable = assertThrows(AssertionError.class, () -> new BidirectionalLinearSearcher(new long[] { 100 }, 1).floor(50)); - assertEquals("key must be greater than or equal to 100", throwable.getMessage()); + return values; } } From 412d1aa39ee79e8fabdcf39e4fa2813761f0a500 Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Fri, 19 Jan 2024 01:56:29 +0800 Subject: [PATCH 101/178] Update supported version for fixing the bug of noop_update_total metric cannot be updated by bulk API (#11917) * Update supported version for fixing the bug of noop_update_total metric cannot be updated by bulk API Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Modify change log Signed-off-by: Gao Binlong <gbinlong@amazon.com> --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 2 +- .../rest-api-spec/test/indices.stats/50_noop_update.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b509a5963feb9..5525489a8953b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -226,7 +226,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix remote shards balancer and remove unused variables ([#11167](https://github.com/opensearch-project/OpenSearch/pull/11167)) - Fix parsing of flat object fields with dots in keys ([#11425](https://github.com/opensearch-project/OpenSearch/pull/11425)) - Fix bug where replication lag grows post primary relocation ([#11238](https://github.com/opensearch-project/OpenSearch/pull/11238)) -- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485)) +- Fix noop_update_total metric in indexing stats cannot be updated by bulk API ([#11485](https://github.com/opensearch-project/OpenSearch/pull/11485),[#11917](https://github.com/opensearch-project/OpenSearch/pull/11917)) - Fix for stuck update action in a bulk with `retry_on_conflict` property ([#11152](https://github.com/opensearch-project/OpenSearch/issues/11152)) - Fix template setting override for replication type ([#11417](https://github.com/opensearch-project/OpenSearch/pull/11417)) - Fix Automatic addition of protocol broken in #11512 ([#11609](https://github.com/opensearch-project/OpenSearch/pull/11609)) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml index dd8c2a2deb721..2192873623715 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/50_noop_update.yml @@ -23,8 +23,8 @@ setup: --- "Test noop_update_total metric can be updated by both update API and bulk API": - skip: - 
version: " - 2.99.99" #TODO: change to 2.11.99 after the PR is backported to 2.x branch - reason: "fixed in 3.0" + version: " - 2.11.99" + reason: "fixed in 2.12.0" - do: update: From 2f81c5761ae04921b5ecca2299c466998865d7ad Mon Sep 17 00:00:00 2001 From: panguixin <panguixin@bytedance.com> Date: Fri, 19 Jan 2024 02:25:18 +0800 Subject: [PATCH 102/178] Support index level allocation filtering for searchable snapshot index (#11563) * support index level allocation filtering Signed-off-by: panguixin <panguixin@bytedance.com> * run spotless Signed-off-by: panguixin <panguixin@bytedance.com> * fix IT Signed-off-by: panguixin <panguixin@bytedance.com> * fix not statement and add change log Signed-off-by: panguixin <panguixin@bytedance.com> --------- Signed-off-by: panguixin <panguixin@bytedance.com> --- CHANGELOG.md | 1 + .../snapshots/SearchableSnapshotIT.java | 71 +++++++++- .../put/TransportUpdateSettingsAction.java | 2 +- .../cluster/routing/RoutingNodes.java | 17 --- .../allocator/RemoteShardsBalancer.java | 129 ++++++++++-------- .../cluster/routing/RoutingNodesTests.java | 38 ------ .../RemoteShardsMoveShardsTests.java | 9 +- 7 files changed, 151 insertions(+), 116 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5525489a8953b..e685f76770cb7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -131,6 +131,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add copy ingest processor ([#11870](https://github.com/opensearch-project/OpenSearch/pull/11870)) - Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) - Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) +- Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java index 9a92ddc81852a..cb050e17cb237 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java @@ -7,6 +7,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.carrotsearch.randomizedtesting.generators.RandomPicks; +import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.node.stats.NodeStats; import org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.opensearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -25,11 +26,14 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.block.ClusterBlockException; import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.routing.GroupShardsIterator; import org.opensearch.cluster.routing.ShardIterator; import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.common.Priority; import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; +import org.opensearch.common.unit.TimeValue; import 
org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.index.Index; import org.opensearch.index.IndexModule; @@ -47,6 +51,8 @@ import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import java.util.stream.Stream; import java.util.stream.StreamSupport; @@ -234,6 +240,62 @@ public void testSearchableSnapshotAllocationForLocalAndRemoteShardsOnSameNode() assertDocCount(indexName, 100L); } + public void testSearchableSnapshotAllocationFilterSettings() throws Exception { + final int numShardsIndex = randomIntBetween(3, 6); + final String indexName = "test-idx"; + final String restoredIndexName = indexName + "-copy"; + final String repoName = "test-repo"; + final String snapshotName = "test-snap"; + final Client client = client(); + + internalCluster().ensureAtLeastNumSearchAndDataNodes(numShardsIndex); + createIndexWithDocsAndEnsureGreen(numShardsIndex, 1, 100, indexName); + createRepositoryWithSettings(null, repoName); + takeSnapshot(client, snapshotName, repoName, indexName); + + restoreSnapshotAndEnsureGreen(client, snapshotName, repoName); + assertRemoteSnapshotIndexSettings(client, restoredIndexName); + final Set<String> searchNodes = StreamSupport.stream(clusterService().state().getNodes().spliterator(), false) + .filter(DiscoveryNode::isSearchNode) + .map(DiscoveryNode::getId) + .collect(Collectors.toSet()); + + for (int i = searchNodes.size(); i > 2; --i) { + String pickedNode = randomFrom(searchNodes); + searchNodes.remove(pickedNode); + assertIndexAssignedToNodeOrNot(restoredIndexName, pickedNode, true); + assertTrue( + client.admin() + .indices() + .prepareUpdateSettings(restoredIndexName) + .setSettings(Settings.builder().put("index.routing.allocation.exclude._id", pickedNode)) + .execute() + .actionGet() + .isAcknowledged() + ); + ClusterHealthResponse clusterHealthResponse = client.admin() + .cluster() + .prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setTimeout(new TimeValue(5, TimeUnit.MINUTES)) + .execute() + .actionGet(); + assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); + assertIndexAssignedToNodeOrNot(restoredIndexName, pickedNode, false); + assertIndexAssignedToNodeOrNot(indexName, pickedNode, true); + } + } + + private void assertIndexAssignedToNodeOrNot(String index, String node, boolean assigned) { + final ClusterState state = clusterService().state(); + if (assigned) { + assertTrue(state.getRoutingTable().allShards(index).stream().anyMatch(shard -> shard.currentNodeId().equals(node))); + } else { + assertTrue(state.getRoutingTable().allShards(index).stream().noneMatch(shard -> shard.currentNodeId().equals(node))); + } + } + /** * Tests the functionality of remote shard allocation to * ensure it can handle node drops for failover scenarios and the cluster gets back to a healthy state when @@ -341,11 +403,16 @@ public void testDeleteSearchableSnapshotBackingIndex() throws Exception { } private void createIndexWithDocsAndEnsureGreen(int numReplicasIndex, int numOfDocs, String indexName) throws InterruptedException { + createIndexWithDocsAndEnsureGreen(1, numReplicasIndex, numOfDocs, indexName); + } + + private void createIndexWithDocsAndEnsureGreen(int numShardsIndex, int numReplicasIndex, int numOfDocs, String indexName) + throws InterruptedException { createIndex( indexName, Settings.builder() - .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, Integer.toString(numReplicasIndex)) - 
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "1") + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, numReplicasIndex) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShardsIndex) .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.FS.getSettingsKey()) .build() ); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java index 14c985f1d3427..9265c6ae60678 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/settings/put/TransportUpdateSettingsAction.java @@ -82,7 +82,7 @@ public class TransportUpdateSettingsAction extends TransportClusterManagerNodeAc "index.number_of_replicas" ); - private final static String[] ALLOWLIST_REMOTE_SNAPSHOT_SETTINGS_PREFIXES = { "index.search.slowlog" }; + private final static String[] ALLOWLIST_REMOTE_SNAPSHOT_SETTINGS_PREFIXES = { "index.search.slowlog", "index.routing.allocation" }; private final MetadataUpdateSettingsService updateSettingsService; diff --git a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java index a406552f854da..938a603c459c9 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java +++ b/server/src/main/java/org/opensearch/cluster/routing/RoutingNodes.java @@ -729,23 +729,6 @@ assert node(failedShard.currentNodeId()).getByShardId(failedShard.shardId()) == + " was matched but wasn't removed"; } - public void swapPrimaryWithReplica( - Logger logger, - ShardRouting primaryShard, - ShardRouting replicaShard, - RoutingChangesObserver changes - ) { - assert primaryShard.primary() : "Invalid primary shard provided"; - assert !replicaShard.primary() : "Invalid Replica shard provided"; - - ShardRouting newPrimary = primaryShard.moveActivePrimaryToReplica(); - ShardRouting newReplica = replicaShard.moveActiveReplicaToPrimary(); - updateAssigned(primaryShard, newPrimary); - updateAssigned(replicaShard, newReplica); - logger.info("Swap relocation performed for shard [{}]", newPrimary.shortSummary()); - changes.replicaPromoted(newPrimary); - } - private void unassignPrimaryAndPromoteActiveReplicaIfExists( ShardRouting failedShard, UnassignedInfo unassignedInfo, diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java index 8a14ce3f1a288..e2f24e5f503df 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java @@ -84,24 +84,39 @@ void moveShards() { Queue<RoutingNode> excludedNodes = new ArrayDeque<>(); classifyNodesForShardMovement(eligibleNodes, excludedNodes); - if (excludedNodes.isEmpty()) { - logger.debug("No excluded nodes found. 
Returning..."); - return; - } - - while (!eligibleNodes.isEmpty() && !excludedNodes.isEmpty()) { - RoutingNode sourceNode = excludedNodes.poll(); - for (ShardRouting ineligibleShard : sourceNode) { - if (ineligibleShard.started() == false) { + // move shards that cannot remain on eligible nodes + final List<ShardRouting> forceMoveShards = new ArrayList<>(); + eligibleNodes.forEach(sourceNode -> { + for (final ShardRouting shardRouting : sourceNode) { + if (ineligibleForMove(shardRouting)) { continue; } - if (!RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(ineligibleShard, allocation))) { + if (allocation.deciders().canRemain(shardRouting, sourceNode, allocation) == Decision.NO) { + forceMoveShards.add(shardRouting); + } + } + }); + for (final ShardRouting shard : forceMoveShards) { + if (eligibleNodes.isEmpty()) { + logger.trace("there are no eligible nodes available, return"); + return; + } + + tryShardMovementToEligibleNode(eligibleNodes, shard); + } + + // move shards that are currently assigned on excluded nodes + while (eligibleNodes.isEmpty() == false && excludedNodes.isEmpty() == false) { + RoutingNode sourceNode = excludedNodes.poll(); + for (final ShardRouting ineligibleShard : sourceNode) { + if (ineligibleForMove(ineligibleShard)) { continue; } if (eligibleNodes.isEmpty()) { - break; + logger.trace("there are no eligible nodes available, return"); + return; } tryShardMovementToEligibleNode(eligibleNodes, ineligibleShard); @@ -109,6 +124,10 @@ void moveShards() { } } + private boolean ineligibleForMove(ShardRouting shard) { + return shard.started() == false || RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation)) == false; + } + /** * Classifies the nodes into eligible and excluded depending on whether node is able or unable for shard assignment * @param eligibleNodes contains the list of classified nodes eligible to accept shards @@ -145,10 +164,23 @@ private void classifyNodesForShardMovement(Queue<RoutingNode> eligibleNodes, Que * @param shard the ineligible shard to be moved */ private void tryShardMovementToEligibleNode(Queue<RoutingNode> eligibleNodes, ShardRouting shard) { - Set<String> nodesCheckedForShard = new HashSet<>(); - while (!eligibleNodes.isEmpty()) { - RoutingNode targetNode = eligibleNodes.poll(); - Decision currentShardDecision = allocation.deciders().canAllocate(shard, targetNode, allocation); + final Set<String> nodesCheckedForShard = new HashSet<>(); + int numNodesToCheck = eligibleNodes.size(); + while (eligibleNodes.isEmpty() == false) { + assert numNodesToCheck > 0; + final RoutingNode targetNode = eligibleNodes.poll(); + --numNodesToCheck; + // skip the node that the target shard is currently allocated on + if (targetNode.nodeId().equals(shard.currentNodeId())) { + assert nodesCheckedForShard.add(targetNode.nodeId()); + eligibleNodes.offer(targetNode); + if (numNodesToCheck == 0) { + return; + } + continue; + } + + final Decision currentShardDecision = allocation.deciders().canAllocate(shard, targetNode, allocation); if (currentShardDecision.type() == Decision.Type.YES) { if (logger.isDebugEnabled()) { @@ -166,7 +198,7 @@ private void tryShardMovementToEligibleNode(Queue<RoutingNode> eligibleNodes, Sh allocation.changes() ); eligibleNodes.offer(targetNode); - break; + return; } else { if (logger.isTraceEnabled()) { logger.trace( @@ -177,18 +209,19 @@ private void tryShardMovementToEligibleNode(Queue<RoutingNode> eligibleNodes, Sh ); } - Decision nodeLevelDecision = 
allocation.deciders().canAllocateAnyShardToNode(targetNode, allocation); + final Decision nodeLevelDecision = allocation.deciders().canAllocateAnyShardToNode(targetNode, allocation); if (nodeLevelDecision.type() == Decision.Type.YES) { logger.debug("Node: [{}] can still accept shards. Adding it back to the queue.", targetNode.nodeId()); eligibleNodes.offer(targetNode); - nodesCheckedForShard.add(targetNode.nodeId()); + assert nodesCheckedForShard.add(targetNode.nodeId()); } else { logger.debug("Node: [{}] cannot accept any more shards. Removing it from queue.", targetNode.nodeId()); } - // Break out if all nodes in the queue have been checked for this shard - if (eligibleNodes.stream().allMatch(rn -> nodesCheckedForShard.contains(rn.nodeId()))) { - break; + // Break out if all eligible nodes have been examined + if (numNodesToCheck == 0) { + assert eligibleNodes.stream().allMatch(rn -> nodesCheckedForShard.contains(rn.nodeId())); + return; } } } @@ -225,7 +258,7 @@ void balance() { } } - while (!sourceNodes.isEmpty() && !targetNodes.isEmpty()) { + while (sourceNodes.isEmpty() == false && targetNodes.isEmpty() == false) { RoutingNode sourceNode = sourceNodes.poll(); tryRebalanceNode(sourceNode, targetNodes, avgPrimaryPerNode, nodePrimaryShardCount); } @@ -275,11 +308,11 @@ public Map<String, UnassignedIndexShards> groupUnassignedShardsByIndex() { HashMap<String, UnassignedIndexShards> unassignedShardMap = new HashMap<>(); for (ShardRouting shard : routingNodes.unassigned().drain()) { String index = shard.getIndexName(); - if (!RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation))) { + if (RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation)) == false) { routingNodes.unassigned().add(shard); continue; } - if (!unassignedShardMap.containsKey(index)) { + if (unassignedShardMap.containsKey(index) == false) { unassignedShardMap.put(index, new UnassignedIndexShards()); } unassignedShardMap.get(index).addShard(shard); @@ -296,13 +329,15 @@ private void unassignIgnoredRemoteShards(RoutingAllocation routingAllocation) { RoutingNodes.UnassignedShards unassignedShards = routingAllocation.routingNodes().unassigned(); for (ShardRouting shard : unassignedShards.drainIgnored()) { RoutingPool pool = RoutingPool.getShardPool(shard, routingAllocation); - if (pool == RoutingPool.REMOTE_CAPABLE && shard.unassigned() && (shard.primary() || !shard.unassignedInfo().isDelayed())) { + if (pool == RoutingPool.REMOTE_CAPABLE + && shard.unassigned() + && (shard.primary() || shard.unassignedInfo().isDelayed() == false)) { ShardRouting unassignedShard = shard; // Shard when moved to an unassigned state updates the recovery source to be ExistingStoreRecoverySource // Remote shards do not have an existing store to recover from and can be recovered from an empty source // to re-fetch any shard blocks from the repository. 
if (shard.primary()) { - if (!RecoverySource.Type.SNAPSHOT.equals(shard.recoverySource().getType())) { + if (RecoverySource.Type.SNAPSHOT.equals(shard.recoverySource().getType()) == false) { unassignedShard = shard.updateUnassigned(shard.unassignedInfo(), RecoverySource.EmptyStoreRecoverySource.INSTANCE); } } @@ -353,7 +388,7 @@ private void allocateUnassignedShards( } logger.debug("Allocating shards for index: [{}]", index); - while (!shardsToAllocate.isEmpty() && !nodeQueue.isEmpty()) { + while (shardsToAllocate.isEmpty() == false && nodeQueue.isEmpty() == false) { ShardRouting shard = shardsToAllocate.poll(); if (shard.assignedToNode()) { if (logger.isDebugEnabled()) { @@ -390,7 +425,7 @@ private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouti boolean allocated = false; boolean throttled = false; Set<String> nodesCheckedForShard = new HashSet<>(); - while (!nodeQueue.isEmpty()) { + while (nodeQueue.isEmpty() == false) { RoutingNode node = nodeQueue.poll(); Decision allocateDecision = allocation.deciders().canAllocate(shard, node, allocation); nodesCheckedForShard.add(node.nodeId()); @@ -449,7 +484,7 @@ private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouti } } - if (!allocated) { + if (allocated == false) { UnassignedInfo.AllocationStatus status = throttled ? UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED : UnassignedInfo.AllocationStatus.DECIDERS_NO; @@ -470,14 +505,16 @@ private void tryRebalanceNode( // Try to relocate the valid shards on the sourceNode, one at a time; // until either sourceNode is balanced OR no more active primary shard available OR all the target nodes are exhausted - while (shardsToBalance > 0 && shardIterator.hasNext() && !targetNodes.isEmpty()) { + while (shardsToBalance > 0 && shardIterator.hasNext() && targetNodes.isEmpty() == false) { // Find an active primary shard to relocate ShardRouting shard = shardIterator.next(); - if (!shard.started() || !shard.primary() || !RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation))) { + if (shard.started() == false + || shard.primary() == false + || RoutingPool.REMOTE_CAPABLE.equals(RoutingPool.getShardPool(shard, allocation)) == false) { continue; } - while (!targetNodes.isEmpty()) { + while (targetNodes.isEmpty() == false) { // Find a valid target node that can accommodate the current shard relocation RoutingNode targetNode = targetNodes.poll(); if (primaryCount.get(targetNode.nodeId()) >= avgPrimary) { @@ -485,6 +522,10 @@ private void tryRebalanceNode( continue; } + if (targetNode.getByShardId(shard.shardId()) != null) { + continue; + } + // Try relocate the shard on the target node Decision rebalanceDecision = tryRelocateShard(shard, targetNode); @@ -522,21 +563,10 @@ private void tryRebalanceNode( } /** - * For every primary shard for which this method is invoked, - * swap is attempted with the destination node in case replica shard is present. - * In case replica is not present, relocation of the shard id performed. + * For every primary shard for which this method is invoked, relocation of the shard id performed. */ private Decision tryRelocateShard(ShardRouting shard, RoutingNode destinationNode) { - // Check if there is already a replica for the shard on the destination node. - // Then we can directly swap the replica with the primary shards. - // Invariant: We only allow swap relocation on remote shards. 
- ShardRouting replicaShard = destinationNode.getByShardId(shard.shardId()); - if (replicaShard != null) { - assert !replicaShard.primary() : "Primary Shard found while expected Replica during shard rebalance"; - return executeSwapShard(shard, replicaShard, allocation); - } - - // Since no replica present on the destinationNode; try relocating the shard to the destination node + assert destinationNode.getByShardId(shard.shardId()) == null; Decision allocationDecision = allocation.deciders().canAllocate(shard, destinationNode, allocation); Decision rebalanceDecision = allocation.deciders().canRebalance(shard, allocation); logger.trace( @@ -566,15 +596,6 @@ private Decision tryRelocateShard(ShardRouting shard, RoutingNode destinationNod return Decision.NO; } - private Decision executeSwapShard(ShardRouting primaryShard, ShardRouting replicaShard, RoutingAllocation allocation) { - if (!replicaShard.started()) { - return new Decision.Single(Decision.Type.NO); - } - - allocation.routingNodes().swapPrimaryWithReplica(logger, primaryShard, replicaShard, allocation.changes()); - return new Decision.Single(Decision.Type.YES); - } - private void failUnattemptedShards() { RoutingNodes.UnassignedShards.UnassignedIterator unassignedIterator = routingNodes.unassigned().iterator(); while (unassignedIterator.hasNext()) { diff --git a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java index 780d041c25d04..7a0fd76b0fbd9 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/RoutingNodesTests.java @@ -46,8 +46,6 @@ import java.util.Iterator; import java.util.List; -import org.mockito.Mockito; - import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -206,40 +204,4 @@ public void testInterleavedShardIteratorReplicaFirst() { } assertEquals(shardCount, this.totalNumberOfShards); } - - public void testSwapPrimaryWithReplica() { - // Initialize all the shards for test index 1 and 2 - initPrimaries(); - startInitializingShards(TEST_INDEX_1); - startInitializingShards(TEST_INDEX_1); - startInitializingShards(TEST_INDEX_2); - startInitializingShards(TEST_INDEX_2); - - // Create primary shard count imbalance between two nodes - final RoutingNodes routingNodes = this.clusterState.getRoutingNodes(); - final RoutingNode node0 = routingNodes.node("node0"); - final RoutingNode node1 = routingNodes.node("node1"); - final List<ShardRouting> shardRoutingList = node0.shardsWithState(TEST_INDEX_1, ShardRoutingState.STARTED); - final RoutingChangesObserver routingChangesObserver = Mockito.mock(RoutingChangesObserver.class); - int swaps = 0; - - for (ShardRouting routing : shardRoutingList) { - if (routing.primary()) { - ShardRouting swap = node1.getByShardId(routing.shardId()); - routingNodes.swapPrimaryWithReplica(logger, routing, swap, routingChangesObserver); - swaps++; - } - } - Mockito.verify(routingChangesObserver, Mockito.times(swaps)).replicaPromoted(Mockito.any()); - - final List<ShardRouting> shards = node1.shardsWithState(TEST_INDEX_1, ShardRoutingState.STARTED); - int shardCount = 0; - for (ShardRouting shard : shards) { - if (shard.primary()) { - shardCount++; - } - } - - assertTrue(shardCount >= swaps); - } } diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java 
b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java index f2e79b319d0dd..b840b78eff448 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsMoveShardsTests.java @@ -47,7 +47,7 @@ public void testExcludeNodeIdMoveBlocked() { /** * Test move operations for index level allocation settings. - * Supported for local indices, not supported for remote indices. + * Supported for local indices and remote indices. */ public void testIndexLevelExclusions() throws InterruptedException { int localOnlyNodes = 7; @@ -102,8 +102,9 @@ public void testIndexLevelExclusions() throws InterruptedException { // No shard of updated local index should be on excluded local capable node assertTrue(routingTable.allShards(localIndex).stream().noneMatch(shard -> shard.currentNodeId().equals(excludedLocalOnlyNode))); - // Since remote index shards are untouched, at least one shard should - // continue to stay on the excluded remote capable node - assertTrue(routingTable.allShards(remoteIndex).stream().anyMatch(shard -> shard.currentNodeId().equals(excludedRemoteCapableNode))); + // No shard of updated remote index should be on excluded remote capable node + assertTrue( + routingTable.allShards(remoteIndex).stream().noneMatch(shard -> shard.currentNodeId().equals(excludedRemoteCapableNode)) + ); } } From 411b279e8d8c25ff26a961faacf0ee1fd9d0e612 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Thu, 18 Jan 2024 14:06:11 -0500 Subject: [PATCH 103/178] Add @SuppressWarnings("removal") to not fail the build with -Werror for some JDKs (#11923) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- .../src/main/java/org/opensearch/client/sniff/Sniffer.java | 1 + .../common/annotation/processor/CompilerSupport.java | 1 + .../main/java/org/opensearch/nio/ServerChannelContext.java | 1 + .../main/java/org/opensearch/nio/SocketChannelContext.java | 1 + .../org/opensearch/plugins/ExtendedPluginsClassLoader.java | 1 + .../src/main/java/org/opensearch/secure_sm/SecureSM.java | 1 + .../secure_sm/SecuredForkJoinWorkerThreadFactory.java | 1 + .../src/test/java/org/opensearch/secure_sm/SecureSMTests.java | 1 + .../main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java | 3 +++ .../opensearch/script/expression/ExpressionScriptEngine.java | 1 + .../org/opensearch/script/mustache/MustacheScriptEngine.java | 1 + .../java/org/opensearch/painless/spi/AllowlistLoader.java | 1 + .../main/java/org/opensearch/painless/LambdaBootstrap.java | 1 + .../java/org/opensearch/painless/PainlessScriptEngine.java | 1 + .../org/opensearch/painless/lookup/PainlessLookupBuilder.java | 1 + .../java/org/opensearch/painless/DocFieldsPhaseTests.java | 1 + .../org/opensearch/common/blobstore/url/URLBlobContainer.java | 1 + .../src/main/java/org/opensearch/systemd/Libsystemd.java | 1 + .../src/main/java/org/opensearch/crypto/kms/SocketAccess.java | 1 + .../main/java/org/opensearch/discovery/ec2/SocketAccess.java | 1 + .../java/org/opensearch/repositories/azure/SocketAccess.java | 1 + .../java/org/opensearch/repositories/gcs/SocketAccess.java | 1 + .../java/org/opensearch/repositories/s3/SocketAccess.java | 1 + .../org/opensearch/action/admin/ReloadSecureSettingsIT.java | 1 + server/src/main/java/org/opensearch/SpecialPermission.java | 1 + .../main/java/org/opensearch/bootstrap/BootstrapChecks.java | 1 + 
.../src/main/java/org/opensearch/bootstrap/BootstrapInfo.java | 1 + server/src/main/java/org/opensearch/bootstrap/OpenSearch.java | 1 + .../main/java/org/opensearch/bootstrap/OpenSearchPolicy.java | 1 + .../bootstrap/OpenSearchUncaughtExceptionHandler.java | 1 + server/src/main/java/org/opensearch/bootstrap/Security.java | 1 + .../common/util/concurrent/OpenSearchExecutors.java | 1 + .../opensearch/index/store/remote/utils/TransferManager.java | 1 + server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java | 1 + .../src/main/java/org/opensearch/plugins/PluginSecurity.java | 1 + .../src/main/java/org/opensearch/plugins/PluginsService.java | 1 + .../main/java/org/opensearch/search/lookup/LeafDocLookup.java | 1 + .../java/org/opensearch/bootstrap/OpenSearchPolicyTests.java | 1 + .../src/test/java/org/opensearch/bootstrap/SecurityTests.java | 1 + .../java/org/opensearch/index/shard/StoreRecoveryTests.java | 1 + .../java/org/opensearch/bootstrap/BootstrapForTesting.java | 1 + .../org/opensearch/mockito/plugin/PriviledgedMockMaker.java | 1 + .../src/main/java/org/opensearch/test/FeatureFlagSetter.java | 2 ++ .../java/org/opensearch/test/disruption/LongGCDisruption.java | 4 ++-- 44 files changed, 48 insertions(+), 2 deletions(-) diff --git a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java index adddb3bda725c..f609fae4e3c81 100644 --- a/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java +++ b/client/sniffer/src/main/java/org/opensearch/client/sniff/Sniffer.java @@ -305,6 +305,7 @@ public void shutdown() { } } + @SuppressWarnings("removal") static class SnifferThreadFactory implements ThreadFactory { private final AtomicInteger threadNumber = new AtomicInteger(1); private final String namePrefix; diff --git a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java index dcf8dd7945012..c8fdb3333a714 100644 --- a/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java +++ b/libs/common/src/test/java/org/opensearch/common/annotation/processor/CompilerSupport.java @@ -43,6 +43,7 @@ default CompilerResult compile(String name, String... names) { return compileWithPackage(ApiAnnotationProcessorTests.class.getPackageName(), name, names); } + @SuppressWarnings("removal") default CompilerResult compileWithPackage(String pck, String name, String... 
names) { final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); final DiagnosticCollector<JavaFileObject> collector = new DiagnosticCollector<>(); diff --git a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java index 898ce7e4e913b..ab48cc2357e7f 100644 --- a/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java +++ b/libs/nio/src/main/java/org/opensearch/nio/ServerChannelContext.java @@ -129,6 +129,7 @@ private void configureSocket(ServerSocket socket) throws IOException { socket.setReuseAddress(config.tcpReuseAddress()); } + @SuppressWarnings("removal") protected static SocketChannel accept(ServerSocketChannel serverSocketChannel) throws IOException { try { assert serverSocketChannel.isBlocking() == false; diff --git a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java index 3df8e42fe4f14..530aa1d86afc7 100644 --- a/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java +++ b/libs/nio/src/main/java/org/opensearch/nio/SocketChannelContext.java @@ -388,6 +388,7 @@ private void configureSocket(Socket socket, boolean isConnectComplete) throws IO } } + @SuppressWarnings("removal") private static void connect(SocketChannel socketChannel, InetSocketAddress remoteAddress) throws IOException { try { AccessController.doPrivileged((PrivilegedExceptionAction<Boolean>) () -> socketChannel.connect(remoteAddress)); diff --git a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java index 4a200a5dfa9bd..969fa91b50538 100644 --- a/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java +++ b/libs/plugin-classloader/src/main/java/org/opensearch/plugins/ExtendedPluginsClassLoader.java @@ -65,6 +65,7 @@ protected Class<?> findClass(String name) throws ClassNotFoundException { /** * Return a new classloader across the parent and extended loaders. 
*/ + @SuppressWarnings("removal") public static ExtendedPluginsClassLoader create(ClassLoader parent, List<ClassLoader> extendedLoaders) { return AccessController.doPrivileged( (PrivilegedAction<ExtendedPluginsClassLoader>) () -> new ExtendedPluginsClassLoader(parent, extendedLoaders) diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java index f41c49844997d..a2531f4a9156e 100644 --- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java +++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecureSM.java @@ -76,6 +76,7 @@ * @see <a href="http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html"> * http://cs.oswego.edu/pipermail/concurrency-interest/2009-August/006508.html</a> */ +@SuppressWarnings("removal") public class SecureSM extends SecurityManager { private final String[] classesThatCanExit; diff --git a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java index fe239fea8129e..3c8e78a902fcb 100644 --- a/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java +++ b/libs/secure-sm/src/main/java/org/opensearch/secure_sm/SecuredForkJoinWorkerThreadFactory.java @@ -18,6 +18,7 @@ import java.util.concurrent.ForkJoinPool.ForkJoinWorkerThreadFactory; import java.util.concurrent.ForkJoinWorkerThread; +@SuppressWarnings("removal") public class SecuredForkJoinWorkerThreadFactory implements ForkJoinWorkerThreadFactory { static AccessControlContext contextWithPermissions(Permission... perms) { Permissions permissions = new Permissions(); diff --git a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java index 026ffb080ee61..fd666c70cfebb 100644 --- a/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java +++ b/libs/secure-sm/src/test/java/org/opensearch/secure_sm/SecureSMTests.java @@ -41,6 +41,7 @@ import junit.framework.TestCase; /** Simple tests for SecureSM */ +@SuppressWarnings("removal") public class SecureSMTests extends TestCase { static { // install a mock security policy: diff --git a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java index c7cf27b2e6493..b27c0f9fe0b31 100644 --- a/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java +++ b/modules/ingest-geoip/src/main/java/org/opensearch/ingest/geoip/GeoIpProcessor.java @@ -218,6 +218,7 @@ Set<Property> getProperties() { return properties; } + @SuppressWarnings("removal") private Map<String, Object> retrieveCityGeoData(InetAddress ipAddress) { SpecialPermission.check(); CityResponse response = AccessController.doPrivileged( @@ -305,6 +306,7 @@ private Map<String, Object> retrieveCityGeoData(InetAddress ipAddress) { return geoData; } + @SuppressWarnings("removal") private Map<String, Object> retrieveCountryGeoData(InetAddress ipAddress) { SpecialPermission.check(); CountryResponse response = AccessController.doPrivileged( @@ -351,6 +353,7 @@ private Map<String, Object> retrieveCountryGeoData(InetAddress ipAddress) { return geoData; } + @SuppressWarnings("removal") private Map<String, Object> retrieveAsnGeoData(InetAddress ipAddress) { SpecialPermission.check(); AsnResponse 
response = AccessController.doPrivileged( diff --git a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java index 5629b3b4a6972..0520177b72b62 100644 --- a/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/opensearch/script/expression/ExpressionScriptEngine.java @@ -170,6 +170,7 @@ public String getType() { return NAME; } + @SuppressWarnings("removal") @Override public <T> T compile(String scriptName, String scriptSource, ScriptContext<T> context, Map<String, String> params) { // classloader created here diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java index ec84475b70bb6..842353fdba336 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MustacheScriptEngine.java @@ -128,6 +128,7 @@ private class MustacheExecutableScript extends TemplateScript { this.params = params; } + @SuppressWarnings("removal") @Override public String execute() { final StringWriter writer = new StringWriter(); diff --git a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java index 632fee9187eba..f18a7fb3ba1a9 100644 --- a/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/opensearch/painless/spi/AllowlistLoader.java @@ -513,6 +513,7 @@ public static Allowlist loadFromResourceFiles(Class<?> resource, Map<String, All } } + @SuppressWarnings("removal") ClassLoader loader = AccessController.doPrivileged((PrivilegedAction<ClassLoader>) resource::getClassLoader); return new Allowlist(loader, allowlistClasses, allowlistStatics, allowlistClassBindings, Collections.emptyList()); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java b/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java index d0af4651d2d3b..2bf70882a501b 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/LambdaBootstrap.java @@ -501,6 +501,7 @@ private static void endLambdaClass(ClassWriter cw) { * Defines the {@link Class} for the lambda class using the same {@link Compiler.Loader} * that originally defined the class for the Painless script. */ + @SuppressWarnings("removal") private static Class<?> createLambdaClass(Compiler.Loader loader, ClassWriter cw, Type lambdaClassType) { byte[] classBytes = cw.toByteArray(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java index e9edfb73c740c..257687bfb98c5 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java @@ -67,6 +67,7 @@ /** * Implementation of a ScriptEngine for the Painless language. 
  */
+@SuppressWarnings("removal")
 public final class PainlessScriptEngine implements ScriptEngine {

     /**
diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java
index e79eda975f417..e155a890c03d1 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java
@@ -2189,6 +2189,7 @@ private void generateBridgeMethod(PainlessClassBuilder painlessClassBuilder, Pai
         bridgeClassWriter.visitEnd();

         try {
+            @SuppressWarnings("removal")
             BridgeLoader bridgeLoader = AccessController.doPrivileged(new PrivilegedAction<BridgeLoader>() {
                 @Override
                 public BridgeLoader run() {
diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java
index 3418bcf01e19f..691e84176dce3 100644
--- a/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DocFieldsPhaseTests.java
@@ -48,6 +48,7 @@ public class DocFieldsPhaseTests extends ScriptTestCase {
     PainlessLookup lookup = PainlessLookupBuilder.buildFromAllowlists(Allowlist.BASE_ALLOWLISTS);

+    @SuppressWarnings("removal")
     ScriptScope compile(String script) {
         Compiler compiler = new Compiler(
             MockDocTestScript.CONTEXT.instanceClazz,
diff --git a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java
index b13a4d5a39a5b..02e858cb8d1f2 100644
--- a/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java
+++ b/modules/repository-url/src/main/java/org/opensearch/common/blobstore/url/URLBlobContainer.java
@@ -157,6 +157,7 @@ public void writeBlobAtomic(String blobName, InputStream inputStream, long blobS
         throw new UnsupportedOperationException("URL repository doesn't support this operation");
     }

+    @SuppressWarnings("removal")
     @SuppressForbidden(reason = "We call connect in doPrivileged and provide SocketPermission")
     private static InputStream getInputStream(URL url) throws IOException {
         try {
diff --git a/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java b/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java
index 93e2e28718d51..05c6222d3d89a 100644
--- a/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java
+++ b/modules/systemd/src/main/java/org/opensearch/systemd/Libsystemd.java
@@ -40,6 +40,7 @@
 /**
  * Provides access to the native method sd_notify from libsystemd.
  */
+@SuppressWarnings("removal")
 class Libsystemd {

     static {
diff --git a/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java b/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java
index 5b026c30017ca..f3d0f278c7ce7 100644
--- a/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java
+++ b/plugins/crypto-kms/src/main/java/org/opensearch/crypto/kms/SocketAccess.java
@@ -19,6 +19,7 @@
 * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in
 * {@link AccessController#doPrivileged(PrivilegedAction)} blocks.
 */
+@SuppressWarnings("removal")
 public final class SocketAccess {

     private SocketAccess() {}
diff --git a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java
index c6605002c4462..0125ae4d19c3e 100644
--- a/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java
+++ b/plugins/discovery-ec2/src/main/java/org/opensearch/discovery/ec2/SocketAccess.java
@@ -46,6 +46,7 @@
 * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in
 * {@link AccessController#doPrivileged(PrivilegedAction)} blocks.
 */
+@SuppressWarnings("removal")
 final class SocketAccess {

     private SocketAccess() {}
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java
index 0fbe9797f726f..a206c3b883870 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/SocketAccess.java
@@ -49,6 +49,7 @@
 * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in
 * {@link AccessController#doPrivileged(PrivilegedAction)} blocks.
 */
+@SuppressWarnings("removal")
 public final class SocketAccess {

     private SocketAccess() {}
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java
index 35127d6ea4060..f8c451749480b 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/SocketAccess.java
@@ -48,6 +48,7 @@
 * needs {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access
 * in {@link AccessController#doPrivileged(PrivilegedAction)} blocks.
 */
+@SuppressWarnings("removal")
 final class SocketAccess {

     private SocketAccess() {}
diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java
index 4888764dbc720..f88aa46e61806 100644
--- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java
+++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/SocketAccess.java
@@ -46,6 +46,7 @@
 * {@link SocketPermission} 'connect' to establish connections. This class wraps the operations requiring access in
 * {@link AccessController#doPrivileged(PrivilegedAction)} blocks.
 */
+@SuppressWarnings("removal")
 public final class SocketAccess {

     private SocketAccess() {}
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java
index 5605e4872887a..c81d491719e4b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/ReloadSecureSettingsIT.java
@@ -449,6 +449,7 @@ public void onFailure(Exception e) {
         }
     }

+    @SuppressWarnings("removal")
     private SecureSettings writeEmptyKeystore(Environment environment, char[] password) throws Exception {
         final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create();
         try {
diff --git a/server/src/main/java/org/opensearch/SpecialPermission.java b/server/src/main/java/org/opensearch/SpecialPermission.java
index 8a694d4543f32..8348f0844acc6 100644
--- a/server/src/main/java/org/opensearch/SpecialPermission.java
+++ b/server/src/main/java/org/opensearch/SpecialPermission.java
@@ -98,6 +98,7 @@ public SpecialPermission(String name, String actions) {
     /**
      * Check that the current stack has {@link SpecialPermission} access according to the {@link SecurityManager}.
      */
+    @SuppressWarnings("removal")
     public static void check() {
         SecurityManager sm = System.getSecurityManager();
         if (sm != null) {
diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
index e43c42446de2c..485dd43a5999c 100644
--- a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
+++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java
@@ -718,6 +718,7 @@ public final BootstrapCheckResult check(BootstrapContext context) {
             return BootstrapCheckResult.success();
         }

+        @SuppressWarnings("removal")
         boolean isAllPermissionGranted() {
             final SecurityManager sm = System.getSecurityManager();
             assert sm != null;
diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java
index 0aa965ce46096..52dd5d710eedc 100644
--- a/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java
+++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapInfo.java
@@ -128,6 +128,7 @@ public Object remove(Object key) {
     /**
      * Returns a read-only view of all system properties
     */
+    @SuppressWarnings("removal")
     public static Dictionary<Object, Object> getSystemProperties() {
         SecurityManager sm = System.getSecurityManager();
         if (sm != null) {
diff --git a/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java b/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java
index 4d36efff0e192..8eb4f841b9671 100644
--- a/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java
+++ b/server/src/main/java/org/opensearch/bootstrap/OpenSearch.java
@@ -83,6 +83,7 @@ class OpenSearch extends EnvironmentAwareCommand {
     /**
      * Main entry point for starting opensearch
     */
+    @SuppressWarnings("removal")
     public static void main(final String[] args) throws Exception {
         overrideDnsCachePolicyProperties();
         /*
diff --git a/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java b/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java
index 14435db64274c..4571eb35ca93c 100644
--- a/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java
+++ b/server/src/main/java/org/opensearch/bootstrap/OpenSearchPolicy.java
@@ -53,6 +53,7 @@
 *
 * @opensearch.internal
 **/
+@SuppressWarnings("removal")
 final class OpenSearchPolicy extends Policy {

     /** template policy file, the one used in tests */
diff --git a/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java b/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java
index 2b28260097ce1..5f9a01436b4cb 100644
--- a/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java
+++ b/server/src/main/java/org/opensearch/bootstrap/OpenSearchUncaughtExceptionHandler.java
@@ -98,6 +98,7 @@ void onNonFatalUncaught(final String threadName, final Throwable t) {
         Terminal.DEFAULT.flush();
     }

+    @SuppressWarnings("removal")
     void halt(int status) {
         AccessController.doPrivileged(new PrivilegedHaltAction(status));
     }
diff --git a/server/src/main/java/org/opensearch/bootstrap/Security.java b/server/src/main/java/org/opensearch/bootstrap/Security.java
index 749c146de4f16..a48bbd61016e3 100644
--- a/server/src/main/java/org/opensearch/bootstrap/Security.java
+++ b/server/src/main/java/org/opensearch/bootstrap/Security.java
@@ -119,6 +119,7 @@
 *
 * @opensearch.internal
 */
+@SuppressWarnings("removal")
 final class Security {
     /** no instantiation */
     private Security() {}
diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java
index ec1024bbe5f30..c95fb5b1044cc 100644
--- a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java
+++ b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java
@@ -393,6 +393,7 @@ static class OpenSearchThreadFactory implements ThreadFactory {
         final AtomicInteger threadNumber = new AtomicInteger(1);
         final String namePrefix;

+        @SuppressWarnings("removal")
         OpenSearchThreadFactory(String namePrefix) {
             this.namePrefix = namePrefix;
             SecurityManager s = System.getSecurityManager();
diff --git a/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java b/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java
index c9469283ee921..9250e73e08509 100644
--- a/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java
+++ b/server/src/main/java/org/opensearch/index/store/remote/utils/TransferManager.java
@@ -75,6 +75,7 @@ public IndexInput fetchBlob(BlobFetchRequest blobFetchRequest) throws IOExceptio
         }
     }

+    @SuppressWarnings("removal")
     private static FileCachedIndexInput createIndexInput(FileCache fileCache, BlobContainer blobContainer, BlobFetchRequest request) {
         // We need to do a privileged action here in order to fetch from remote
         // and write to the local file cache in case this is invoked as a side
diff --git a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java
index dc27ab0fb91c2..e3290bfec6905 100644
--- a/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java
+++ b/server/src/main/java/org/opensearch/monitor/jvm/JvmInfo.java
@@ -216,6 +216,7 @@ private static boolean usingBundledJdkOrJre() {
         }
     }

+    @SuppressWarnings("removal")
     public static JvmInfo jvmInfo() {
         SecurityManager sm = System.getSecurityManager();
         if (sm != null) {
diff --git a/server/src/main/java/org/opensearch/plugins/PluginSecurity.java b/server/src/main/java/org/opensearch/plugins/PluginSecurity.java
index e7d92016d4082..1bf8642d1112f 100644
--- a/server/src/main/java/org/opensearch/plugins/PluginSecurity.java
+++ b/server/src/main/java/org/opensearch/plugins/PluginSecurity.java
@@ -135,6 +135,7 @@ static String formatPermission(Permission permission) {
     /**
      * Parses plugin policy into a set of permissions. Each permission is formatted for output to users.
     */
+    @SuppressWarnings("removal")
     public static Set<String> parsePermissions(Path file, Path tmpDir) throws IOException {
         // create a zero byte file for "comparison"
         // this is necessary because the default policy impl automatically grants two permissions:
diff --git a/server/src/main/java/org/opensearch/plugins/PluginsService.java b/server/src/main/java/org/opensearch/plugins/PluginsService.java
index cc9cc5b5b5fbf..590b70d3bfc53 100644
--- a/server/src/main/java/org/opensearch/plugins/PluginsService.java
+++ b/server/src/main/java/org/opensearch/plugins/PluginsService.java
@@ -682,6 +682,7 @@ static void checkBundleJarHell(Set<URL> classpath, Bundle bundle, Map<String, Se
         }
     }

+    @SuppressWarnings("removal")
     private Plugin loadBundle(Bundle bundle, Map<String, Plugin> loaded) {
         String name = bundle.plugin.getName();
diff --git a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java
index 3bd1c5118b5fb..70cbd8d7ad6c3 100644
--- a/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java
+++ b/server/src/main/java/org/opensearch/search/lookup/LeafDocLookup.java
@@ -78,6 +78,7 @@ public void setDocument(int docId) {
         this.docId = docId;
     }

+    @SuppressWarnings("removal")
     @Override
     public ScriptDocValues<?> get(Object key) {
         // assume its a string...
diff --git a/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java b/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java
index b1e27ea9c66e3..2b4d2a755f543 100644
--- a/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java
+++ b/server/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyTests.java
@@ -49,6 +49,7 @@ public class OpenSearchPolicyTests extends OpenSearchTestCase {
     /**
      * test restricting privileges to no permissions actually works
     */
+    @SuppressWarnings("removal")
     public void testRestrictPrivileges() {
         assumeTrue("test requires security manager", System.getSecurityManager() != null);
         try {
diff --git a/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java b/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java
index ea4ef96ec0f77..69e561bb8fd89 100644
--- a/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java
+++ b/server/src/test/java/org/opensearch/bootstrap/SecurityTests.java
@@ -72,6 +72,7 @@ public void testEnsureRegularFile() throws IOException {
     }

     /** can't execute processes */
+    @SuppressWarnings("removal")
     public void testProcessExecution() throws Exception {
         assumeTrue("test requires security manager", System.getSecurityManager() != null);
         try {
diff --git a/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java b/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java
index c1a51bb780f61..846b975a9520e 100644
--- a/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/StoreRecoveryTests.java
@@ -270,6 +270,7 @@ public void testStatsDirWrapper() throws IOException {
         IOUtils.close(dir, target);
     }

+    @SuppressWarnings("removal")
     public boolean hardLinksSupported(Path path) throws IOException {
         try {
             Files.createFile(path.resolve("foo.bar"));
diff --git a/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java
index 43881d0660e04..933385dedcf49 100644
--- a/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java
+++ b/test/framework/src/main/java/org/opensearch/bootstrap/BootstrapForTesting.java
@@ -84,6 +84,7 @@
 * The idea is to mimic as much as possible what happens with ES in production
 * mode (e.g. assign permissions and install security manager the same way)
 */
+@SuppressWarnings("removal")
 public class BootstrapForTesting {

     // TODO: can we share more code with the non-test side here
diff --git a/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java b/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java
index 0f5e043ee1135..cc2d26a598757 100644
--- a/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java
+++ b/test/framework/src/main/java/org/opensearch/mockito/plugin/PriviledgedMockMaker.java
@@ -31,6 +31,7 @@
 * Mockito plugin which wraps the Mockito calls into priviledged execution blocks and respects
 * SecurityManager presence.
 */
+@SuppressWarnings("removal")
 @SuppressForbidden(reason = "allow URL#getFile() to be used in tests")
 public class PriviledgedMockMaker implements MockMaker {
     private static AccessControlContext context;
diff --git a/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java b/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java
index eddcf9c738bb3..f698cd03c464f 100644
--- a/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java
+++ b/test/framework/src/main/java/org/opensearch/test/FeatureFlagSetter.java
@@ -46,6 +46,7 @@ public static synchronized void clear() {
     private static final Logger LOGGER = LogManager.getLogger(FeatureFlagSetter.class);
     private final Set<String> flags = ConcurrentCollections.newConcurrentSet();

+    @SuppressWarnings("removal")
     @SuppressForbidden(reason = "Enables setting of feature flags")
     private void setFlag(String flag) {
         flags.add(flag);
@@ -53,6 +54,7 @@ private void setFlag(String flag) {
         LOGGER.info("set feature_flag={}", flag);
     }

+    @SuppressWarnings("removal")
     @SuppressForbidden(reason = "Clears the set feature flags")
     private void clearAll() {
         for (String flag : flags) {
diff --git a/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java b/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java
index 44837c37962b4..168fbd5bd0d0a 100644
--- a/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java
+++ b/test/framework/src/main/java/org/opensearch/test/disruption/LongGCDisruption.java
@@ -252,7 +252,7 @@ public TimeValue expectedTimeToHeal() {
     * returns true if some live threads were found. The caller is expected to call this method
     * until no more "live" are found.
     */
-    @SuppressWarnings("deprecation") // suspends/resumes threads intentionally
+    @SuppressWarnings({ "deprecation", "removal" }) // suspends/resumes threads intentionally
     @SuppressForbidden(reason = "suspends/resumes threads intentionally")
     protected boolean suspendThreads(Set<Thread> nodeThreads) {
         Thread[] allThreads = null;
@@ -360,7 +360,7 @@ protected void onBlockDetected(ThreadInfo blockedThread, @Nullable ThreadInfo bl
         );
     }

-    @SuppressWarnings("deprecation") // suspends/resumes threads intentionally
+    @SuppressWarnings({ "deprecation", "removal" }) // suspends/resumes threads intentionally
     @SuppressForbidden(reason = "suspends/resumes threads intentionally")
     protected void resumeThreads(Set<Thread> threads) {
         for (Thread thread : threads) {

From 6dee9551738ab47c513d52c43b9d48da7b8df54d Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Thu, 18 Jan 2024 15:34:51 -0500
Subject: [PATCH 104/178] Add @SuppressWarnings("removal") to not fail the
 build with -Werror for some JDKs (#11931)

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
---
 .../org/opensearch/test/rest/JsonLogsFormatAndParseIT.java | 1 +
 .../azure/classic/AzureDiscoveryClusterFormationTests.java | 1 +
 .../azure/classic/management/AzureComputeServiceImpl.java | 1 +
 .../java/org/opensearch/discovery/ec2/Ec2NetworkTests.java | 2 ++
 .../src/main/java/org/opensearch/cloud/gce/util/Access.java | 1 +
 .../main/java/org/opensearch/ingest/attachment/TikaImpl.java | 2 ++
 .../opensearch/repositories/azure/AzureStorageService.java | 1 +
 .../gcs/GoogleCloudStorageRetryingInputStream.java | 1 +
 .../java/org/opensearch/repositories/hdfs/HdfsPlugin.java | 1 +
 .../java/org/opensearch/repositories/hdfs/HdfsRepository.java | 1 +
 .../org/opensearch/repositories/hdfs/HdfsSecurityContext.java | 1 +
 .../repositories/hdfs/HaHdfsFailoverTestSuiteIT.java | 1 +
 .../repositories/hdfs/HdfsBlobStoreContainerTests.java | 1 +
 .../repositories/s3/RepositoryCredentialsTests.java | 1 +
 .../java/org/opensearch/telemetry/OTelTelemetrySettings.java | 4 ++--
 .../opensearch/telemetry/metrics/OTelMetricsTelemetry.java | 2 ++
 .../metrics/exporter/OTelMetricsExporterFactory.java | 1 +
 .../opensearch/telemetry/tracing/OTelResourceProvider.java | 1 +
 .../telemetry/tracing/exporter/OTelSpanExporterFactory.java | 1 +
 .../org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java | 1 +
 .../java/org/opensearch/bootstrap/SystemCallFilterTests.java | 1 +
 .../test/java/org/opensearch/plugins/PluginSecurityTests.java | 1 +
 .../opensearch/qa/custom_logging/CustomLoggingConfigIT.java | 1 +
 .../unconfigured_node_name/JsonLogsFormatAndParseIT.java | 1 +
 24 files changed, 28 insertions(+), 2 deletions(-)

diff --git a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
index 88f667549f3e8..faef1441d0a02 100644
--- a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
+++ b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/JsonLogsFormatAndParseIT.java
@@ -51,6 +51,7 @@ protected Matcher<String> nodeNameMatcher() {
         return is("integTest-0");
     }

+    @SuppressWarnings("removal")
     @Override
     protected BufferedReader openReader(Path logFile) {
         assumeFalse("Skipping test because it is being run against an external cluster.",
diff --git a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java
index ad622e68f5ccb..a4b733ec7d894 100644
--- a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java
+++ b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java
@@ -296,6 +296,7 @@ private static SSLContext getSSLContext() throws Exception {
      * The {@link HttpsServer} in the JDK has issues with TLSv1.3 when running in a JDK prior to
      * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK
      */
+    @SuppressWarnings("removal")
     private static String getProtocol() {
         if (Runtime.version().compareTo(Version.parse("12")) < 0) {
             return "TLSv1.2";
diff --git a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java
index 1bac80e576199..6e21feca7f5fb 100644
--- a/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java
+++ b/plugins/discovery-azure-classic/src/main/java/org/opensearch/cloud/azure/classic/management/AzureComputeServiceImpl.java
@@ -112,6 +112,7 @@ private static String getRequiredSetting(Settings settings, Setting<String> sett
         return value;
     }

+    @SuppressWarnings("removal")
     @Override
     public HostedServiceGetDetailedResponse getServiceDetails() {
         SpecialPermission.check();
diff --git a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java
index b4ed613c0d8dd..9518fac442111 100644
--- a/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/opensearch/discovery/ec2/Ec2NetworkTests.java
@@ -89,6 +89,7 @@ public static void startHttp() throws Exception {
         httpServer.start();
     }

+    @SuppressWarnings("removal")
     @Before
     public void setup() {
         // redirect EC2 metadata service to httpServer
@@ -116,6 +117,7 @@ public void testNetworkHostEc2() throws IOException {
     /**
      * Test for network.host: _ec2_
     */
+    @SuppressWarnings("removal")
     public void testNetworkHostUnableToResolveEc2() {
         // redirect EC2 metadata service to unknown location
         AccessController.doPrivileged(
diff --git a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java
index 1401f7ca26ce6..c46bfedbd8507 100644
--- a/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java
+++ b/plugins/discovery-gce/src/main/java/org/opensearch/cloud/gce/util/Access.java
@@ -48,6 +48,7 @@
 * {@code connect}. This class wraps the operations requiring access in
 * {@link AccessController#doPrivileged(PrivilegedAction)} blocks.
 */
+@SuppressWarnings("removal")
 public final class Access {

     private Access() {}
diff --git a/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java
index ce7ceb5e3d776..fe783e5ddb675 100644
--- a/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java
+++ b/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java
@@ -112,6 +112,7 @@ final class TikaImpl {
     /**
      * parses with tika, throwing any exception hit while parsing the document
     */
+    @SuppressWarnings("removal")
     static String parse(final byte content[], final Metadata metadata, final int limit) throws TikaException, IOException {
         // check that its not unprivileged code like a script
         SpecialPermission.check();
@@ -136,6 +137,7 @@ static String parse(final byte content[], final Metadata metadata, final int lim

     // apply additional containment for parsers, this is intersected with the current permissions
     // its hairy, but worth it so we don't have some XML flaw reading random crap from the FS
+    @SuppressWarnings("removal")
     private static final AccessControlContext RESTRICTED_CONTEXT = new AccessControlContext(
         new ProtectionDomain[] { new ProtectionDomain(null, getRestrictedPermissions()) }
     );
diff --git a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
index b60701ba5e533..74edd4f3eb23c 100644
--- a/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
+++ b/plugins/repository-azure/src/main/java/org/opensearch/repositories/azure/AzureStorageService.java
@@ -389,6 +389,7 @@ private static class NioThreadFactory implements ThreadFactory {
         private final AtomicInteger threadNumber = new AtomicInteger(1);
         private final String namePrefix;

+        @SuppressWarnings("removal")
         NioThreadFactory() {
             SecurityManager s = System.getSecurityManager();
             group = (s != null) ? s.getThreadGroup() : Thread.currentThread().getThreadGroup();
diff --git a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java
index e15b37f209c5f..620f8e98d5f20 100644
--- a/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java
+++ b/plugins/repository-gcs/src/main/java/org/opensearch/repositories/gcs/GoogleCloudStorageRetryingInputStream.java
@@ -106,6 +106,7 @@ class GoogleCloudStorageRetryingInputStream extends InputStream {
         currentStream = openStream();
     }

+    @SuppressWarnings("removal")
     @SuppressForbidden(reason = "need access to storage client")
     private static com.google.api.services.storage.Storage getStorage(Storage client) {
         return AccessController.doPrivileged((PrivilegedAction<com.google.api.services.storage.Storage>) () -> {
diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java
index 119d060374be2..af49cd3c579e6 100644
--- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java
+++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsPlugin.java
@@ -52,6 +52,7 @@
 import java.util.Collections;
 import java.util.Map;

+@SuppressWarnings("removal")
 public final class HdfsPlugin extends Plugin implements RepositoryPlugin {

     // initialize some problematic classes with elevated privileges
diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java
index f0ffec5713c1d..4b38e62b2525a 100644
--- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java
+++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java
@@ -254,6 +254,7 @@ private static String getHostName() {
         }
     }

+    @SuppressWarnings("removal")
     @Override
     protected HdfsBlobStore createBlobStore() {
         // initialize our blobstore using elevated privileges.
diff --git a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java
index 07d1d29eecfc4..5a27eb937ff9c 100644
--- a/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java
+++ b/plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsSecurityContext.java
@@ -57,6 +57,7 @@
 * Keeps track of the current user for a given repository, as well as which
 * permissions to grant the blob store restricted execution methods.
 */
+@SuppressWarnings("removal")
 class HdfsSecurityContext {

     private static final Permission[] SIMPLE_AUTH_PERMISSIONS;
diff --git a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java
index d0b63f17e3887..89ba8d51cf7f7 100644
--- a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java
+++ b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java
@@ -62,6 +62,7 @@
 /**
 * Integration test that runs against an HA-Enabled HDFS instance
 */
+@SuppressWarnings("removal")
 public class HaHdfsFailoverTestSuiteIT extends OpenSearchRestTestCase {

     public void testHAFailoverWithRepository() throws Exception {
diff --git a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java
index 3a6eb0e205ccb..5f7454df4ecfc 100644
--- a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java
+++ b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java
@@ -64,6 +64,7 @@
 @ThreadLeakFilters(filters = { HdfsClientThreadLeakFilter.class })
 public class HdfsBlobStoreContainerTests extends OpenSearchTestCase {

+    @SuppressWarnings("removal")
     private FileContext createTestContext() {
         FileContext fileContext;
         try {
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java
index 8e1926d40302f..f84d953baae8e 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/RepositoryCredentialsTests.java
@@ -75,6 +75,7 @@
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.notNullValue;

+@SuppressWarnings("removal")
 @SuppressForbidden(reason = "test requires to set a System property to allow insecure settings when running in IDE")
 public class RepositoryCredentialsTests extends OpenSearchSingleNodeTestCase implements ConfigPathSupport {
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java
index 8e23f724b4570..b31ed320d737e 100644
--- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/OTelTelemetrySettings.java
@@ -66,7 +66,7 @@ private OTelTelemetrySettings() {}
     /**
      * Span Exporter type setting.
     */
-    @SuppressWarnings("unchecked")
+    @SuppressWarnings({ "unchecked", "removal" })
     public static final Setting<Class<SpanExporter>> OTEL_TRACER_SPAN_EXPORTER_CLASS_SETTING = new Setting<>(
         "telemetry.otel.tracer.span.exporter.class",
         LoggingSpanExporter.class.getName(),
@@ -90,7 +90,7 @@ private OTelTelemetrySettings() {}
     /**
      * Metrics Exporter type setting.
     */
-    @SuppressWarnings("unchecked")
+    @SuppressWarnings({ "unchecked", "removal" })
     public static final Setting<Class<MetricExporter>> OTEL_METRICS_EXPORTER_CLASS_SETTING = new Setting<>(
         "telemetry.otel.metrics.exporter.class",
         LoggingMetricExporter.class.getName(),
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
index 6160e5106c041..af235b9604669 100644
--- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
@@ -42,6 +42,7 @@ public OTelMetricsTelemetry(RefCountedReleasable<OpenTelemetrySdk> openTelemetry
         this.otelMeter = meterProvider.get(OTelTelemetryPlugin.INSTRUMENTATION_SCOPE_NAME);
     }

+    @SuppressWarnings("removal")
     @Override
     public Counter createCounter(String name, String description, String unit) {
         DoubleCounter doubleCounter = AccessController.doPrivileged(
@@ -54,6 +55,7 @@ public Counter createCounter(String name, String description, String unit) {
         return new OTelCounter(doubleCounter);
     }

+    @SuppressWarnings("removal")
     @Override
     public Counter createUpDownCounter(String name, String description, String unit) {
         DoubleUpDownCounter doubleUpDownCounter = AccessController.doPrivileged(
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java
index ef5a31e4003ca..9c548044484fd 100644
--- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/exporter/OTelMetricsExporterFactory.java
@@ -51,6 +51,7 @@ public static MetricExporter create(Settings settings) {
         return metricExporter;
     }

+    @SuppressWarnings("removal")
     private static MetricExporter instantiateExporter(Class<MetricExporter> exporterProviderClass) {
         try {
             // Check we ourselves are not being called by unprivileged code.
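
[Editor's note] Every hunk in this patch applies the same pattern: annotate the narrowest scope that touches the SecurityManager / AccessController APIs, which have been deprecated for removal since JDK 17 (JEP 411), so that javac's "removal" warning does not fail `-Werror` builds. A minimal, self-contained sketch of that pattern follows; the class and method names are hypothetical and not taken from the patch:

```java
import java.security.AccessController;
import java.security.PrivilegedAction;

final class PrivilegedPropertyReader {

    private PrivilegedPropertyReader() {}

    // Suppress only the "removal" warning emitted for the deprecated-for-removal
    // AccessController API; the privileged call itself is unchanged.
    @SuppressWarnings("removal")
    static String read(String key) {
        return AccessController.doPrivileged((PrivilegedAction<String>) () -> System.getProperty(key));
    }
}
```

Keeping the annotation on the member rather than the class preserves removal warnings for any future, unintended uses elsewhere in the same file; the class-level form appears above only where nearly every member touches these APIs.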
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java
index 14a19f122c17b..b1a45f2e7c2d2 100644
--- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java
@@ -48,6 +48,7 @@ private OTelResourceProvider() {}
     * @param settings cluster settings
     * @return OpenTelemetrySdk instance
     */
+    @SuppressWarnings("removal")
    public static OpenTelemetrySdk get(TelemetrySettings telemetrySettings, Settings settings) {
        return AccessController.doPrivileged(
            (PrivilegedAction<OpenTelemetrySdk>) () -> get(
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java
index da7ce5c47d9ca..e9d7e78882c7d 100644
--- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/exporter/OTelSpanExporterFactory.java
@@ -51,6 +51,7 @@ public static SpanExporter create(Settings settings) {
         return spanExporter;
     }

+    @SuppressWarnings("removal")
     private static SpanExporter instantiateSpanExporter(Class<SpanExporter> spanExporterProviderClass) {
         try {
             // Check we ourselves are not being called by unprivileged code.
diff --git a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java
index db532f9a1c503..4ea23e415c994 100644
--- a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java
+++ b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/OpenSearchPolicyUnitTests.java
@@ -50,6 +50,7 @@
 * Unit tests for OpenSearchPolicy: these cannot run with security manager,
 * we don't allow messing with the policy
 */
+@SuppressWarnings("removal")
 public class OpenSearchPolicyUnitTests extends OpenSearchTestCase {
     /**
      * Test policy with null codesource.
diff --git a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java
index 56d6c72705a78..99c9ee7e96d01 100644
--- a/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java
+++ b/qa/evil-tests/src/test/java/org/opensearch/bootstrap/SystemCallFilterTests.java
@@ -41,6 +41,7 @@ public class SystemCallFilterTests extends OpenSearchTestCase {
     /** command to try to run in tests */
     static final String EXECUTABLE = Constants.WINDOWS ? "calc" : "ls";

+    @SuppressWarnings("removal")
     @Override
     public void setUp() throws Exception {
         super.setUp();
diff --git a/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java b/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java
index 430df1f899708..04eae95f6fe12 100644
--- a/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java
+++ b/qa/evil-tests/src/test/java/org/opensearch/plugins/PluginSecurityTests.java
@@ -41,6 +41,7 @@
 import static org.hamcrest.Matchers.containsInAnyOrder;

 /** Tests plugin manager security check */
+@SuppressWarnings("removal")
 public class PluginSecurityTests extends OpenSearchTestCase {

     /** Test that we can parse the set of permissions correctly for a simple policy */
diff --git a/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java b/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java
index 73c546b80d431..c2f799d7d48d2 100644
--- a/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java
+++ b/qa/logging-config/src/test/java/org/opensearch/qa/custom_logging/CustomLoggingConfigIT.java
@@ -62,6 +62,7 @@ public void testSuccessfulStartupWithCustomConfig() throws Exception {
         });
     }

+    @SuppressWarnings("removal")
     private List<String> readAllLines(Path logFile) {
         return AccessController.doPrivileged((PrivilegedAction<List<String>>) () -> {
             try {
diff --git a/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java b/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java
index d14c834405f32..8a6e5d62112c8 100644
--- a/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java
+++ b/qa/unconfigured-node-name/src/test/java/org/opensearch/unconfigured_node_name/JsonLogsFormatAndParseIT.java
@@ -61,6 +61,7 @@ protected Matcher<String> nodeNameMatcher() {
         return equalTo(HOSTNAME);
     }

+    @SuppressWarnings("removal")
     @Override
     protected BufferedReader openReader(Path logFile) {
         return AccessController.doPrivileged((PrivilegedAction<BufferedReader>) () -> {

From c267c5affa52bb3d176b81f87c9175090f7469e5 Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Thu, 18 Jan 2024 15:52:21 -0500
Subject: [PATCH 105/178] Extract cluster management for integration tests
 into JUnit test rule out of OpenSearchIntegTestCase (#11877)

* Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

* Branched off parametrized tests into static / dynamic settings flavors

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

* Address code review comments

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

* Refactor internalCluster() method to return an Optional value

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

* Address code review comments

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>

---------

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
---
 CHANGELOG.md | 1 +
 .../common/QueryStringWithAnalyzersIT.java | 8 +-
 .../common/HighlighterWithAnalyzersTests.java | 8 +-
 .../geo/GeoModulePluginIntegTestCase.java | 8 +-
 .../opensearch/geo/search/MissingValueIT.java | 4 +-
 .../script/expression/MoreExpressionIT.java | 8 +-
 .../script/expression/StoredExpressionIT.java | 8 +-
 .../mustache/MultiSearchTemplateIT.java | 8 +-
 .../join/query/ParentChildTestCase.java | 8 +-
 .../percolator/PercolatorQuerySearchIT.java | 8 +-
 .../index/rankeval/RankEvalRequestIT.java | 8 +-
 .../node/tasks/CancellableTasksIT.java | 8 +-
 .../action/termvectors/GetTermVectorsIT.java | 4 +-
 .../termvectors/MultiTermVectorsIT.java | 4 +-
 .../org/opensearch/index/IndexSortIT.java | 8 +-
 .../index/search/MatchPhraseQueryIT.java | 8 +-
 .../index/suggest/stats/SuggestStatsIT.java | 8 +-
 .../indices/IndicesRequestCacheIT.java | 4 +-
 .../breaker/CircuitBreakerServiceIT.java | 8 +-
 .../indices/stats/IndexStatsIT.java | 4 +-
 .../org/opensearch/mget/SimpleMgetIT.java | 8 +-
 .../org/opensearch/script/ScriptCacheIT.java | 4 +-
 .../search/SearchCancellationIT.java | 4 +-
 .../opensearch/search/SearchTimeoutIT.java | 4 +-
 .../search/SearchWithRejectionsIT.java | 8 +-
 .../search/StressSearchServiceReaperIT.java | 4 +-
 .../AggregationsIntegrationIT.java | 8 +-
 .../search/aggregations/CombiIT.java | 8 +-
 .../search/aggregations/EquivalenceIT.java | 8 +-
 .../search/aggregations/MetadataIT.java | 8 +-
 .../search/aggregations/MissingValueIT.java | 8 +-
 .../bucket/AdjacencyMatrixIT.java | 4 +-
 .../aggregations/bucket/BooleanTermsIT.java | 4 +-
 .../aggregations/bucket/DateHistogramIT.java | 8 +-
 .../bucket/DateHistogramOffsetIT.java | 8 +-
 .../aggregations/bucket/DateRangeIT.java | 8 +-
 .../bucket/DiversifiedSamplerIT.java | 8 +-
 .../aggregations/bucket/DoubleTermsIT.java | 4 +-
 .../search/aggregations/bucket/FilterIT.java | 8 +-
 .../search/aggregations/bucket/FiltersIT.java | 4 +-
 .../aggregations/bucket/GeoDistanceIT.java | 8 +-
 .../search/aggregations/bucket/GlobalIT.java | 8 +-
 .../aggregations/bucket/HistogramIT.java | 8 +-
 .../search/aggregations/bucket/IpRangeIT.java | 8 +-
 .../search/aggregations/bucket/IpTermsIT.java | 4 +-
 .../aggregations/bucket/LongTermsIT.java | 4 +-
 .../aggregations/bucket/MinDocCountIT.java | 4 +-
 .../aggregations/bucket/MultiTermsIT.java | 4 +-
 .../aggregations/bucket/NaNSortingIT.java | 8 +-
 .../search/aggregations/bucket/NestedIT.java | 8 +-
 .../search/aggregations/bucket/RangeIT.java | 8 +-
 .../aggregations/bucket/ReverseNestedIT.java | 8 +-
 .../search/aggregations/bucket/SamplerIT.java | 8 +-
 .../aggregations/bucket/ShardReduceIT.java | 8 +-
 .../aggregations/bucket/ShardSizeTermsIT.java | 4 +-
 .../SignificantTermsSignificanceScoreIT.java | 8 +-
 .../bucket/TermsDocCountErrorIT.java | 8 +-
 .../bucket/TermsFixedDocCountErrorIT.java | 8 +-
 .../bucket/TermsShardMinDocCountIT.java | 8 +-
 .../bucket/terms/BaseStringTermsTestCase.java | 4 +-
 .../bucket/terms/StringTermsIT.java | 4 +-
 .../aggregations/metrics/CardinalityIT.java | 8 +-
 .../CardinalityWithRequestBreakerIT.java | 8 +-
 .../aggregations/metrics/ExtendedStatsIT.java | 4 +-
 .../aggregations/metrics/GeoCentroidIT.java | 4 +-
 .../metrics/HDRPercentileRanksIT.java | 4 +-
 .../metrics/HDRPercentilesIT.java | 4 +-
 .../metrics/MedianAbsoluteDeviationIT.java | 4 +-
 .../metrics/ScriptedMetricIT.java | 8 +-
 .../search/aggregations/metrics/StatsIT.java | 4 +-
 .../search/aggregations/metrics/SumIT.java | 4 +-
 .../metrics/TDigestPercentileRanksIT.java | 4 +-
 .../metrics/TDigestPercentilesIT.java | 4 +-
 .../aggregations/metrics/TopHitsIT.java | 8 +-
 .../aggregations/metrics/ValueCountIT.java | 8 +-
 .../aggregations/pipeline/AvgBucketIT.java | 8 +-
 .../aggregations/pipeline/BucketScriptIT.java | 8 +-
 .../pipeline/BucketSelectorIT.java | 8 +-
 .../aggregations/pipeline/BucketSortIT.java | 8 +-
 .../pipeline/DateDerivativeIT.java | 8 +-
 .../aggregations/pipeline/DerivativeIT.java | 4 +-
 .../pipeline/ExtendedStatsBucketIT.java | 8 +-
 .../aggregations/pipeline/MaxBucketIT.java | 8 +-
 .../aggregations/pipeline/MinBucketIT.java | 8 +-
 .../aggregations/pipeline/MovAvgIT.java | 8 +-
 .../pipeline/PercentilesBucketIT.java | 8 +-
 .../aggregations/pipeline/SerialDiffIT.java | 8 +-
 .../aggregations/pipeline/StatsBucketIT.java | 8 +-
 .../aggregations/pipeline/SumBucketIT.java | 8 +-
 .../backpressure/SearchBackpressureIT.java | 8 +-
 .../search/basic/SearchRedStateIndexIT.java | 8 +-
 .../basic/SearchWhileCreatingIndexIT.java | 8 +-
 .../search/basic/SearchWhileRelocatingIT.java | 8 +-
 .../basic/SearchWithRandomExceptionsIT.java | 8 +-
 .../basic/SearchWithRandomIOExceptionsIT.java | 8 +-
 .../basic/TransportSearchFailuresIT.java | 8 +-
 .../basic/TransportTwoNodesSearchIT.java | 8 +-
 .../search/fetch/FetchSubPhasePluginIT.java | 8 +-
 .../search/fetch/subphase/InnerHitsIT.java | 8 +-
 .../fetch/subphase/MatchedQueriesIT.java | 8 +-
 .../highlight/CustomHighlighterSearchIT.java | 8 +-
 .../highlight/HighlighterSearchIT.java | 8 +-
 .../search/fieldcaps/FieldCapabilitiesIT.java | 8 +-
 .../search/fields/SearchFieldsIT.java | 8 +-
 .../functionscore/DecayFunctionScoreIT.java | 8 +-
 .../functionscore/ExplainableScriptIT.java | 8 +-
 .../FunctionScoreFieldValueIT.java | 8 +-
 .../search/functionscore/FunctionScoreIT.java | 8 +-
 .../functionscore/FunctionScorePluginIT.java | 8 +-
 .../search/functionscore/QueryRescorerIT.java | 8 +-
 .../functionscore/RandomScoreFunctionIT.java | 8 +-
 .../opensearch/search/geo/GeoFilterIT.java | 8 +-
 .../opensearch/search/geo/GeoPolygonIT.java | 8 +-
 .../search/geo/GeoShapeIntegrationIT.java | 8 +-
 .../geo/LegacyGeoShapeIntegrationIT.java | 8 +-
 .../search/morelikethis/MoreLikeThisIT.java | 8 +-
 .../search/msearch/MultiSearchIT.java | 8 +-
 .../search/nested/SimpleNestedIT.java | 8 +-
 .../opensearch/search/pit/PitMultiNodeIT.java | 4 +-
 .../search/preference/SearchPreferenceIT.java | 8 +-
 .../aggregation/AggregationProfilerIT.java | 8 +-
 .../search/profile/query/QueryProfilerIT.java | 4 +-
 .../org/opensearch/search/query/ExistsIT.java | 8 +-
 .../search/query/MultiMatchQueryIT.java | 8 +-
 .../search/query/QueryStringIT.java | 8 +-
 .../search/query/ScriptScoreQueryIT.java | 8 +-
 .../search/query/SearchQueryIT.java | 8 +-
 .../search/query/SimpleQueryStringIT.java | 8 +-
 .../scriptfilter/ScriptQuerySearchIT.java | 4 +-
 .../search/scroll/DuelScrollIT.java | 4 +-
 .../search/scroll/SearchScrollIT.java | 4 +-
 .../SearchScrollWithFailingNodesIT.java | 4 +-
 .../search/searchafter/SearchAfterIT.java | 4 +-
 .../search/simple/SimpleSearchIT.java | 4 +-
 .../search/slice/SearchSliceIT.java | 8 +-
 .../opensearch/search/sort/FieldSortIT.java | 4 +-
 .../opensearch/search/sort/GeoDistanceIT.java | 8 +-
 .../search/sort/GeoDistanceSortBuilderIT.java | 4 +-
 .../opensearch/search/sort/SimpleSortIT.java | 8 +-
 .../search/sort/SortFromPluginIT.java | 4 +-
 .../search/source/MetadataFetchingIT.java | 8 +-
 .../search/source/SourceFetchingIT.java | 8 +-
 .../search/stats/SearchStatsIT.java | 8 +-
 .../suggest/CompletionSuggestSearchIT.java | 4 +-
 .../ContextCompletionSuggestSearchIT.java | 4 +-
 .../search/suggest/SuggestSearchIT.java | 4 +-
 .../opensearch/similarity/SimilarityIT.java | 4 +-
 .../AbstractTermVectorsTestCase.java | 8 +-
 .../bucket/ShardSizeTestCase.java | 4 +-
 .../metrics/AbstractGeoTestCase.java | 4 +-
 .../bucket/AbstractTermsTestCase.java | 4 +-
 .../metrics/AbstractNumericTestCase.java | 8 +-
 .../test/OpenSearchIntegTestCase.java | 307 +-------------
 .../test/OpenSearchTestClusterRule.java | 397 ++++++++++++++++++
 ...ynamicSettingsOpenSearchIntegTestCase.java | 66 +++
 .../ParameterizedOpenSearchIntegTestCase.java | 57 ++-
 ...StaticSettingsOpenSearchIntegTestCase.java | 53 +++
 157 files changed, 1068 insertions(+), 829 deletions(-)
 create mode 100644 test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java
 create mode 100644 test/framework/src/main/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTestCase.java
 create mode 100644 test/framework/src/main/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTestCase.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e685f76770cb7..be2a639892aa9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -205,6 +205,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732))
 - Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526))
 - Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890))
+- Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877))

 ### Deprecated

diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
index 71af708f2e1dc..3f9e15005fabb 100644
--- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
+++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java
@@ -39,7 +39,7 @@
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.Operator;
 import org.opensearch.plugins.Plugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.util.Arrays;
 import java.util.Collection;
@@ -49,10 +49,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;

-public class QueryStringWithAnalyzersIT extends ParameterizedOpenSearchIntegTestCase {
+public class QueryStringWithAnalyzersIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public QueryStringWithAnalyzersIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public QueryStringWithAnalyzersIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
index 26f4acb2b1e6a..3e93060eaaa90 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java
@@ -44,7 +44,7 @@
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

 import java.io.IOException;
 import java.util.Arrays;
@@ -68,10 +68,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.startsWith;

-public class HighlighterWithAnalyzersTests extends ParameterizedOpenSearchIntegTestCase {
+public class HighlighterWithAnalyzersTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

-    public HighlighterWithAnalyzersTests(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public HighlighterWithAnalyzersTests(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
index c38b29502e282..65fadd211050d 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java
@@ -16,7 +16,7 @@
 import org.opensearch.geometry.utils.WellKnownText;
 import org.opensearch.index.mapper.GeoShapeFieldMapper;
 import org.opensearch.plugins.Plugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.TestGeoShapeFieldMapperPlugin;

 import java.util.Arrays;
@@ -29,14 +29,14 @@
 * This is the base class for all the Geo related integration tests. Use this class to add the features and settings
 * for the test cluster on which integration tests are running.
 */
-public abstract class GeoModulePluginIntegTestCase extends ParameterizedOpenSearchIntegTestCase {
+public abstract class GeoModulePluginIntegTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase {

     protected static final double GEOHASH_TOLERANCE = 1E-5D;

     protected static final WellKnownText WKT = new WellKnownText(true, new StandardValidator(true));

-    public GeoModulePluginIntegTestCase(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GeoModulePluginIntegTestCase(Settings staticSettings) {
+        super(staticSettings);
     }

     @ParametersFactory
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
index 7344903fd5220..9e7ce0d3c7980 100644
--- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
+++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java
@@ -44,8 +44,8 @@ public class MissingValueIT extends GeoModulePluginIntegTestCase {
     private GeoPoint bottomRight;
     private GeoPoint topLeft;

-    public MissingValueIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MissingValueIT(Settings staticSettings) {
+        super(staticSettings);
     }

     @Override
diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java
index 8ca28a905f216..6a1d9d57dd435 100644
--- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java
+++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java
@@ -57,7 +57,7 @@
 import org.opensearch.search.aggregations.pipeline.SimpleValue;
 import org.opensearch.search.sort.SortBuilders;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.hamcrest.OpenSearchAssertions;

 import java.util.Arrays;
@@ -80,10 +80,10 @@
 import static org.hamcrest.Matchers.notNullValue;

 // TODO: please convert to unit tests!
-public class MoreExpressionIT extends ParameterizedOpenSearchIntegTestCase {
+public class MoreExpressionIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public MoreExpressionIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MoreExpressionIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
index b1cb5356a4405..f5cd115e43df5 100644
--- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
+++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java
@@ -43,7 +43,7 @@
 import org.opensearch.script.ScriptType;
 import org.opensearch.search.aggregations.AggregationBuilders;
 import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -54,10 +54,10 @@
 import static org.hamcrest.Matchers.containsString;
 
 //TODO: please convert to unit tests!
-public class StoredExpressionIT extends ParameterizedOpenSearchIntegTestCase {
+public class StoredExpressionIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public StoredExpressionIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public StoredExpressionIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
index e480fbbd22ad2..204be6ccf1a2c 100644
--- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
+++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java
@@ -41,7 +41,7 @@
 import org.opensearch.index.IndexNotFoundException;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.script.ScriptType;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -58,10 +58,10 @@
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.core.Is.is;
 
-public class MultiSearchTemplateIT extends ParameterizedOpenSearchIntegTestCase {
+public class MultiSearchTemplateIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public MultiSearchTemplateIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MultiSearchTemplateIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java
index 8c19c0aafe763..9c0f96cf382a6 100644
--- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java
+++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ParentChildTestCase.java
@@ -41,7 +41,7 @@
 import org.opensearch.plugins.Plugin;
 import org.opensearch.test.InternalSettingsPlugin;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -51,10 +51,10 @@
 import java.util.Map;
 
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE)
-public abstract class ParentChildTestCase extends ParameterizedOpenSearchIntegTestCase {
+public abstract class ParentChildTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public ParentChildTestCase(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public ParentChildTestCase(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
index c8763c2f3f749..3f5b7e9defe30 100644
--- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
+++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java
@@ -57,7 +57,7 @@
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -90,10 +90,10 @@
 import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.core.IsNull.notNullValue;
 
-public class PercolatorQuerySearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class PercolatorQuerySearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public PercolatorQuerySearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public PercolatorQuerySearchIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java
index cdc3cac1a1f06..b5cf9476b2068 100644
--- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java
+++ b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java
@@ -47,7 +47,7 @@
 import org.opensearch.indices.IndexClosedException;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;
 
 import java.util.ArrayList;
@@ -62,14 +62,14 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.instanceOf;
 
-public class RankEvalRequestIT extends ParameterizedOpenSearchIntegTestCase {
+public class RankEvalRequestIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private static final String TEST_INDEX = "test";
     private static final String INDEX_ALIAS = "alias0";
     private static final int RELEVANT_RATING_1 = 1;
 
-    public RankEvalRequestIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public RankEvalRequestIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java
index c4dcedcc722cf..6a769c6c49d18 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java
@@ -69,7 +69,7 @@
 import org.opensearch.tasks.TaskInfo;
 import org.opensearch.tasks.TaskManager;
 import org.opensearch.test.InternalTestCluster;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.TransportException;
 import org.opensearch.transport.TransportResponseHandler;
@@ -99,7 +99,7 @@
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
 
-public class CancellableTasksIT extends ParameterizedOpenSearchIntegTestCase {
+public class CancellableTasksIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     static int idGenerator = 0;
     static final Map<TestRequest, CountDownLatch> beforeSendLatches = ConcurrentCollections.newConcurrentMap();
@@ -107,8 +107,8 @@ public class CancellableTasksIT extends ParameterizedOpenSearchIntegTestCase {
     static final Map<TestRequest, CountDownLatch> beforeExecuteLatches = ConcurrentCollections.newConcurrentMap();
     static final Map<TestRequest, CountDownLatch> completedLatches = ConcurrentCollections.newConcurrentMap();
 
-    public CancellableTasksIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public CancellableTasksIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
index 7bd1467933e00..280f574b1baf9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/GetTermVectorsIT.java
@@ -74,8 +74,8 @@
 
 public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
 
-    public GetTermVectorsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GetTermVectorsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java
index 7c6c47c682281..3fc3235701f17 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/termvectors/MultiTermVectorsIT.java
@@ -52,8 +52,8 @@
 import static org.hamcrest.Matchers.nullValue;
 
 public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
-    public MultiTermVectorsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MultiTermVectorsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     public void testDuelESLucene() throws Exception {
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java
index bb6e356db188f..fcf2852a4c447 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java
@@ -41,7 +41,7 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -51,11 +51,11 @@
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 import static org.hamcrest.Matchers.containsString;
 
-public class IndexSortIT extends ParameterizedOpenSearchIntegTestCase {
+public class IndexSortIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final XContentBuilder TEST_MAPPING = createTestMapping();
 
-    public IndexSortIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public IndexSortIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java
index 2d28578dbebcc..0c57c442be096 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java
@@ -41,7 +41,7 @@
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.MatchPhraseQueryBuilder;
 import org.opensearch.index.search.MatchQuery.ZeroTermsQuery;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;
 
 import java.util.ArrayList;
@@ -55,12 +55,12 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 
-public class MatchPhraseQueryIT extends ParameterizedOpenSearchIntegTestCase {
+public class MatchPhraseQueryIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private static final String INDEX = "test";
 
-    public MatchPhraseQueryIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MatchPhraseQueryIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java
index 6332b1b97426f..5db97fa92bbeb 100644
--- a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.search.suggest.phrase.PhraseSuggestionBuilder;
 import org.opensearch.search.suggest.term.TermSuggestionBuilder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -68,10 +68,10 @@
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2)
-public class SuggestStatsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SuggestStatsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public SuggestStatsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SuggestStatsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
index 51dba07a8f9f8..3862d5b2ab4ed 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java
@@ -49,7 +49,7 @@
 import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.hamcrest.OpenSearchAssertions;
 
 import java.time.ZoneId;
@@ -69,7 +69,7 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 
-public class IndicesRequestCacheIT extends ParameterizedOpenSearchIntegTestCase {
+public class IndicesRequestCacheIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public IndicesRequestCacheIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
index 0c5780210901d..eef1582303fa6 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java
@@ -63,7 +63,7 @@
 import org.opensearch.search.SearchService;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.After;
 import org.junit.Before;
 
@@ -94,9 +94,9 @@
  * Integration tests for InternalCircuitBreakerService
  */
 @ClusterScope(scope = TEST, numClientNodes = 0, maxNumDataNodes = 1)
-public class CircuitBreakerServiceIT extends ParameterizedOpenSearchIntegTestCase {
-    public CircuitBreakerServiceIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+public class CircuitBreakerServiceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
+    public CircuitBreakerServiceIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java
index 9c96d4861d426..6db88cdee3576 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java
@@ -85,7 +85,7 @@
 import org.opensearch.test.InternalSettingsPlugin;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
 import org.opensearch.test.OpenSearchIntegTestCase.Scope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -122,7 +122,7 @@
 @ClusterScope(scope = Scope.SUITE, numDataNodes = 2, numClientNodes = 0)
 @SuppressCodecs("*") // requires custom completion format
-public class IndexStatsIT extends ParameterizedOpenSearchIntegTestCase {
+public class IndexStatsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public IndexStatsIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java
index f77ae80a55276..d3a105f787a90 100644
--- a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java
@@ -46,7 +46,7 @@
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.search.fetch.subphase.FetchSourceContext;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -63,10 +63,10 @@
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
 
-public class SimpleMgetIT extends ParameterizedOpenSearchIntegTestCase {
+public class SimpleMgetIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public SimpleMgetIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SimpleMgetIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java
index 2fbaf4ea5a4d3..05263b1936584 100644
--- a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java
@@ -21,7 +21,7 @@
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.MockSearchService;
 import org.opensearch.test.MockHttpTransport;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.TestGeoShapeFieldMapperPlugin;
 import org.opensearch.test.store.MockFSIndexStore;
 import org.opensearch.test.transport.MockTransportService;
@@ -38,7 +38,7 @@
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 import static org.apache.logging.log4j.core.util.Throwables.getRootCause;
 
-public class ScriptCacheIT extends ParameterizedOpenSearchIntegTestCase {
+public class ScriptCacheIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public ScriptCacheIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java
index 18b4625761c51..3d4645cfb3694 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java
@@ -62,7 +62,7 @@
 import org.opensearch.search.lookup.LeafFieldsLookup;
 import org.opensearch.tasks.TaskInfo;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.transport.TransportException;
 
 import org.junit.After;
@@ -91,7 +91,7 @@
 import static org.hamcrest.Matchers.notNullValue;
 
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE)
-public class SearchCancellationIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchCancellationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private TimeValue requestCancellationTimeout = TimeValue.timeValueSeconds(1);
     private TimeValue clusterCancellationTimeout = TimeValue.timeValueMillis(1500);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java
index 52cc797ddd8da..64ce8e75857fa 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java
@@ -44,7 +44,7 @@
 import org.opensearch.script.Script;
 import org.opensearch.script.ScriptType;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -59,7 +59,7 @@
 import static org.opensearch.search.SearchTimeoutIT.ScriptedTimeoutPlugin.SCRIPT_NAME;
 
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE)
-public class SearchTimeoutIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchTimeoutIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public SearchTimeoutIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java
index 24c72a66da6d0..013a0eaa557ac 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java
@@ -40,7 +40,7 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -52,10 +52,10 @@
 import static org.hamcrest.Matchers.equalTo;
 
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE)
-public class SearchWithRejectionsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchWithRejectionsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public SearchWithRejectionsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchWithRejectionsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java
index a61102b9db144..49c934c4f855a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java
@@ -40,7 +40,7 @@
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -53,7 +53,7 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
 
 @ClusterScope(scope = SUITE)
-public class StressSearchServiceReaperIT extends ParameterizedOpenSearchIntegTestCase {
+public class StressSearchServiceReaperIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public StressSearchServiceReaperIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java
index 257786c1e9ce5..b7f2e2ac57336 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java
@@ -49,7 +49,7 @@
 import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -62,7 +62,7 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class AggregationsIntegrationIT extends ParameterizedOpenSearchIntegTestCase {
+public class AggregationsIntegrationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     static int numDocs;
 
@@ -71,8 +71,8 @@ public class AggregationsIntegrationIT extends ParameterizedOpenSearchIntegTestC
         + LARGE_STRING.length()
         + "] used in the request has exceeded the allowed maximum";
 
-    public AggregationsIntegrationIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public AggregationsIntegrationIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java
index 3d3cf1943dfe3..87328c6e11c65 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java
@@ -42,7 +42,7 @@
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.missing.Missing;
 import org.opensearch.search.aggregations.bucket.terms.Terms;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 
 import java.util.Arrays;
@@ -61,10 +61,10 @@
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.core.IsNull.notNullValue;
 
-public class CombiIT extends ParameterizedOpenSearchIntegTestCase {
+public class CombiIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public CombiIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public CombiIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java
index 2ffdf5fb32778..9fd9a99e15018 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java
@@ -56,7 +56,7 @@
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory;
 import org.opensearch.search.aggregations.metrics.Sum;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.After;
 import org.junit.Before;
 
@@ -94,10 +94,10 @@
  * Additional tests that aim at testing more complex aggregation trees on larger random datasets, so that things like
  * the growth of dynamic arrays is tested.
  */
-public class EquivalenceIT extends ParameterizedOpenSearchIntegTestCase {
+public class EquivalenceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public EquivalenceIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public EquivalenceIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java
index 1bc0cb36f5fe3..8c7e2cb006d28 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java
@@ -41,7 +41,7 @@
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.search.aggregations.pipeline.InternalBucketMetricValue;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -57,10 +57,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
 
-public class MetadataIT extends ParameterizedOpenSearchIntegTestCase {
+public class MetadataIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public MetadataIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MetadataIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java
index e6325987d330f..28a3a2b3c520f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java
@@ -47,7 +47,7 @@
 import org.opensearch.search.aggregations.metrics.Percentiles;
 import org.opensearch.search.aggregations.metrics.Stats;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -65,10 +65,10 @@
 import static org.hamcrest.Matchers.closeTo;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class MissingValueIT extends ParameterizedOpenSearchIntegTestCase {
+public class MissingValueIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public MissingValueIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MissingValueIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java
index cd0922606ec99..4d97925a769dd 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.metrics.Avg;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 
 import java.util.ArrayList;
@@ -75,7 +75,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class AdjacencyMatrixIT extends ParameterizedOpenSearchIntegTestCase {
+public class AdjacencyMatrixIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase {
 
     static int numDocs, numSingleTag1Docs, numSingleTag2Docs, numTag1Docs, numTag2Docs, numMultiTagDocs;
     static final int MAX_NUM_FILTERS = 3;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java
index 7ab1a44ce220c..270240de1fb47 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java
@@ -40,7 +40,7 @@
 import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -53,7 +53,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class BooleanTermsIT extends ParameterizedOpenSearchIntegTestCase {
+public class BooleanTermsIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase {
 
     private static final String SINGLE_VALUED_FIELD_NAME = "b_value";
     private static final String MULTI_VALUED_FIELD_NAME = "b_values";
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
index ee94e574228df..a6434136644ef 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java
@@ -59,7 +59,7 @@
 import org.opensearch.search.aggregations.metrics.Avg;
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import org.hamcrest.Matchers;
 import org.junit.After;
@@ -98,7 +98,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class DateHistogramIT extends ParameterizedOpenSearchIntegTestCase {
+public class DateHistogramIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     static Map<ZonedDateTime, Map<String, Object>> expectedMultiSortBuckets;
 
@@ -106,8 +106,8 @@ private ZonedDateTime date(int month, int day) {
         return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
     }
 
-    public DateHistogramIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DateHistogramIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java
index d44071e1ef9c5..dab6683ceb324 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java
@@ -43,7 +43,7 @@
 import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import org.junit.After;
 import org.junit.Before;
@@ -69,13 +69,13 @@
  */
 @OpenSearchIntegTestCase.SuiteScopeTestCase
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE)
-public class DateHistogramOffsetIT extends ParameterizedOpenSearchIntegTestCase {
+public class DateHistogramOffsetIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private static final String DATE_FORMAT = "yyyy-MM-dd:hh-mm-ss";
     private static final DateFormatter FORMATTER = DateFormatter.forPattern(DATE_FORMAT);
 
-    public DateHistogramOffsetIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DateHistogramOffsetIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java
index ae4243019ffb1..392253626089f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java
@@ -49,7 +49,7 @@
 import org.opensearch.search.aggregations.bucket.range.Range.Bucket;
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import org.hamcrest.Matchers;
 
 import java.time.ZoneId;
@@ -81,10 +81,10 @@
 import static org.hamcrest.core.IsNull.nullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class DateRangeIT extends ParameterizedOpenSearchIntegTestCase {
+public class DateRangeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public DateRangeIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DateRangeIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java
index 1d5f7f93e7410..7046c7e149f80 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java
@@ -48,7 +48,7 @@
 import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.opensearch.search.aggregations.metrics.Max;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -71,12 +71,12 @@
  * Tests the Sampler aggregation
 */
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class DiversifiedSamplerIT extends ParameterizedOpenSearchIntegTestCase {
+public class DiversifiedSamplerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     public static final int NUM_SHARDS = 2;
 
-    public DiversifiedSamplerIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DiversifiedSamplerIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java
index 88bb41923e53f..ccb4af8386472 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DoubleTermsIT.java
@@ -88,8 +88,8 @@
 @OpenSearchIntegTestCase.SuiteScopeTestCase
 public class DoubleTermsIT extends AbstractTermsTestCase {
 
-    public DoubleTermsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DoubleTermsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java
index 7aa98803403e0..86c3051173a2d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java
@@ -46,7 +46,7 @@
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.metrics.Avg;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 
 import java.util.ArrayList;
@@ -68,12 +68,12 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class FilterIT extends ParameterizedOpenSearchIntegTestCase {
+public class FilterIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     static int numDocs, numTag1Docs;
 
-    public FilterIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FilterIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java
index b6cf515df78ba..e0b1fb47823c1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java
@@ -48,7 +48,7 @@
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.metrics.Avg;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 
 import java.util.ArrayList;
@@ -72,7 +72,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class FiltersIT extends ParameterizedOpenSearchIntegTestCase {
+public class FiltersIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase {
 
     static int numDocs, numTag1Docs, numTag2Docs, numOtherDocs;
 
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java
index 025bebf8b254d..8bf20d8fec34f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.search.aggregations.bucket.range.Range.Bucket;
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.VersionUtils;
 
 import org.hamcrest.Matchers;
@@ -76,10 +76,10 @@
 import static org.hamcrest.core.IsNull.nullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class GeoDistanceIT extends ParameterizedOpenSearchIntegTestCase {
+public class GeoDistanceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public GeoDistanceIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GeoDistanceIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java
index be31a3afadad0..395ff8fe1bc4b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java
@@ -43,7 +43,7 @@
 import org.opensearch.search.aggregations.bucket.global.Global;
 import org.opensearch.search.aggregations.metrics.Stats;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -61,12 +61,12 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class GlobalIT extends ParameterizedOpenSearchIntegTestCase {
+public class GlobalIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     static int numDocs;
 
-    public GlobalIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GlobalIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java
index 75f57d1cc4c0e..fbe1002b3a76a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java
@@ -56,7 +56,7 @@
 import org.opensearch.search.aggregations.metrics.Stats;
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 
 import java.io.IOException;
@@ -91,7 +91,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class HistogramIT extends ParameterizedOpenSearchIntegTestCase {
+public class HistogramIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
     private static final String MULTI_VALUED_FIELD_NAME = "l_values";
@@ -102,8 +102,8 @@ public class HistogramIT extends ParameterizedOpenSearchIntegTestCase {
     static long[] valueCounts, valuesCounts;
     static Map<Long, Map<String, Object>> expectedMultiSortBuckets;
 
-    public HistogramIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public HistogramIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java
index 14a3685bd183e..969830e822968 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java
@@ -45,7 +45,7 @@
 import org.opensearch.search.aggregations.AggregationBuilders;
 import org.opensearch.search.aggregations.bucket.range.Range;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -60,10 +60,10 @@
 import static org.hamcrest.Matchers.instanceOf;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class IpRangeIT extends ParameterizedOpenSearchIntegTestCase {
+public class IpRangeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public IpRangeIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public IpRangeIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java
index c712c97af5c71..4d2da4fa1d14b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpTermsIT.java
@@ -51,8 +51,8 @@
 
 public class IpTermsIT extends AbstractTermsTestCase {
 
-    public IpTermsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public IpTermsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java
index 345cbdae8ef07..49031bfd3fc1d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/LongTermsIT.java
@@ -86,8 +86,8 @@
 @OpenSearchIntegTestCase.SuiteScopeTestCase
 public class LongTermsIT extends AbstractTermsTestCase {
 
-    public LongTermsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public LongTermsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java
index 90dafc0d57887..781d2acc5e2be 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MinDocCountIT.java
@@ -82,8 +82,8 @@ public class MinDocCountIT extends AbstractTermsTestCase {
     private static final QueryBuilder QUERY = QueryBuilders.termQuery("match", true);
     private static int cardinality;
 
-    public MinDocCountIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MinDocCountIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java
index ea5a59d89309f..09133f720f9f7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java
@@ -34,8 +34,8 @@
 @OpenSearchIntegTestCase.SuiteScopeTestCase
 public class MultiTermsIT extends BaseStringTermsTestCase {
 
-    public MultiTermsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MultiTermsIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java
index 6289cd5e36151..1c627530d195a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java
@@ -51,7 +51,7 @@
 import org.opensearch.search.aggregations.support.ValuesSource;
 import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -67,7 +67,7 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class NaNSortingIT extends ParameterizedOpenSearchIntegTestCase {
+public class NaNSortingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private enum SubAggregation {
         AVG("avg") {
@@ -139,8 +139,8 @@ public String sortKey() {
         public abstract double getValue(Aggregation aggregation);
     }
 
-    public NaNSortingIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public NaNSortingIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java
index 7af2ac218800d..cd64bbf978786 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java
@@ -57,7 +57,7 @@
 import org.opensearch.search.aggregations.metrics.Stats;
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 
 import java.util.ArrayList;
@@ -92,14 +92,14 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class NestedIT extends ParameterizedOpenSearchIntegTestCase {
+public class NestedIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private static int numParents;
    private static int[] numChildren;
     private static SubAggCollectionMode aggCollectionMode;
 
-    public NestedIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public NestedIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java
index 5812b7796c33e..20a0345dba127 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java
@@ -51,7 +51,7 @@
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.search.aggregations.metrics.Sum;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 
 import java.util.ArrayList;
@@ -79,15 +79,15 @@
 import static org.hamcrest.core.IsNull.nullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class RangeIT extends ParameterizedOpenSearchIntegTestCase {
+public class RangeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     private static final String SINGLE_VALUED_FIELD_NAME = "l_value";
     private static final String MULTI_VALUED_FIELD_NAME = "l_values";
 
     static int numDocs;
 
-    public RangeIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public RangeIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java
index 2716db6b7e745..b0258b035f83c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java
@@ -47,7 +47,7 @@
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.search.aggregations.metrics.ValueCount;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -76,10 +76,10 @@
 import static org.hamcrest.core.IsNull.notNullValue;
 
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class ReverseNestedIT extends ParameterizedOpenSearchIntegTestCase {
+public class ReverseNestedIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
-    public ReverseNestedIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public ReverseNestedIT(Settings staticSettings) {
+        super(staticSettings);
     }
 
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java
index c7b03d21cb6bb..692b45b16065b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java
@@ -48,7 +48,7 @@
 import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket;
 import org.opensearch.search.aggregations.metrics.Max;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
 import java.util.Collection;
@@ -71,7 +71,7 @@
  * Tests the Sampler aggregation
 */
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class SamplerIT extends ParameterizedOpenSearchIntegTestCase {
+public class SamplerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
 
     public static final int NUM_SHARDS = 2;
 
@@ -79,8 +79,8 @@ public String randomExecutionHint() {
null : randomFrom(SamplerAggregator.ExecutionMode.values()).toString(); } - public SamplerIT(Settings dynamicSettings) { - super(dynamicSettings); + public SamplerIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java index 66d761c56634e..4b0fb436a0cf2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java @@ -49,7 +49,7 @@ import org.opensearch.search.aggregations.bucket.range.Range; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -77,10 +77,10 @@ * we can make sure that the reduce is properly propagated by checking that empty buckets were created. */ @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ShardReduceIT extends ParameterizedOpenSearchIntegTestCase { +public class ShardReduceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ShardReduceIT(Settings dynamicSettings) { - super(dynamicSettings); + public ShardReduceIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java index 7c7cc12888307..66cce21bcf86f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardSizeTermsIT.java @@ -47,8 +47,8 @@ public class ShardSizeTermsIT extends ShardSizeTestCase { - public ShardSizeTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public ShardSizeTermsIT(Settings staticSettings) { + super(staticSettings); } public void testNoShardSizeString() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java index e914b87754865..e11bffaf66eb1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java @@ -65,7 +65,7 @@ import org.opensearch.search.aggregations.bucket.terms.heuristic.ScriptHeuristic; import org.opensearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.search.aggregations.bucket.SharedSignificantTermsTestMethods; import java.io.IOException; @@ -95,14 +95,14 @@ import static org.hamcrest.Matchers.is; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) 
-public class SignificantTermsSignificanceScoreIT extends ParameterizedOpenSearchIntegTestCase { +public class SignificantTermsSignificanceScoreIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { static final String INDEX_NAME = "testidx"; static final String TEXT_FIELD = "text"; static final String CLASS_FIELD = "class"; - public SignificantTermsSignificanceScoreIT(Settings dynamicSettings) { - super(dynamicSettings); + public SignificantTermsSignificanceScoreIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java index 343cea4b94c87..7431734b8d5f5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java @@ -45,7 +45,7 @@ import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket; import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -67,7 +67,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class TermsDocCountErrorIT extends ParameterizedOpenSearchIntegTestCase { +public class TermsDocCountErrorIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String STRING_FIELD_NAME = "s_value"; private static final String LONG_FIELD_NAME = "l_value"; @@ -79,8 +79,8 @@ public static String randomExecutionHint() { private static int numRoutingValues; - public TermsDocCountErrorIT(Settings dynamicSettings) { - super(dynamicSettings); + public TermsDocCountErrorIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java index 5ad913e8c7086..c1c99a9beb689 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java @@ -17,7 +17,7 @@ import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -29,12 +29,12 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @OpenSearchIntegTestCase.ClusterScope(scope = TEST, numClientNodes = 0, maxNumDataNodes = 1, supportsDedicatedMasters = false) -public class TermsFixedDocCountErrorIT extends ParameterizedOpenSearchIntegTestCase { +public class TermsFixedDocCountErrorIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String STRING_FIELD_NAME = "s_value"; - public 
TermsFixedDocCountErrorIT(Settings dynamicSettings) { - super(dynamicSettings); + public TermsFixedDocCountErrorIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java index 3851b16551795..48be94ba9b7cc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java @@ -44,7 +44,7 @@ import org.opensearch.search.aggregations.bucket.terms.SignificantTerms; import org.opensearch.search.aggregations.bucket.terms.SignificantTermsAggregatorFactory; import org.opensearch.search.aggregations.bucket.terms.Terms; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -61,12 +61,12 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.equalTo; -public class TermsShardMinDocCountIT extends ParameterizedOpenSearchIntegTestCase { +public class TermsShardMinDocCountIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String index = "someindex"; - public TermsShardMinDocCountIT(Settings dynamicSettings) { - super(dynamicSettings); + public TermsShardMinDocCountIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java index 20caa4fd076fe..79aa4a648310a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java @@ -38,8 +38,8 @@ public class BaseStringTermsTestCase extends AbstractTermsTestCase { protected static final String MULTI_VALUED_FIELD_NAME = "s_values"; protected static Map<String, Map<String, Object>> expectedMultiSortBuckets; - public BaseStringTermsTestCase(Settings dynamicSettings) { - super(dynamicSettings); + public BaseStringTermsTestCase(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java index 8c727d280ec52..edf9cd432dda2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -79,8 +79,8 @@ @OpenSearchIntegTestCase.SuiteScopeTestCase public class StringTermsIT extends BaseStringTermsTestCase { - public StringTermsIT(Settings dynamicSettings) { - super(dynamicSettings); + public StringTermsIT(Settings staticSettings) { + super(staticSettings); } // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java index 9ebec21367164..977ce3df8b3d1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java @@ -48,7 +48,7 @@ import org.opensearch.search.aggregations.bucket.global.Global; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -71,10 +71,10 @@ import static org.hamcrest.Matchers.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class CardinalityIT extends ParameterizedOpenSearchIntegTestCase { +public class CardinalityIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public CardinalityIT(Settings dynamicSettings) { - super(dynamicSettings); + public CardinalityIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java index 94756f3fe9f99..e00fd7516789c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java @@ -43,7 +43,7 @@ import org.opensearch.indices.breaker.HierarchyCircuitBreakerService; import org.opensearch.search.aggregations.Aggregator; import org.opensearch.search.aggregations.BucketOrder; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -54,10 +54,10 @@ import static org.opensearch.search.aggregations.AggregationBuilders.cardinality; import static org.opensearch.search.aggregations.AggregationBuilders.terms; -public class CardinalityWithRequestBreakerIT extends ParameterizedOpenSearchIntegTestCase { +public class CardinalityWithRequestBreakerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public CardinalityWithRequestBreakerIT(Settings dynamicSettings) { - super(dynamicSettings); + public CardinalityWithRequestBreakerIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java index 3d804b9aa626e..4a2c100690de4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -70,8 +70,8 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { - public ExtendedStatsIT(Settings dynamicSettings) { - super(dynamicSettings); + public ExtendedStatsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java index 78100d1778ecf..ed87fa6d8f5f6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/GeoCentroidIT.java @@ -55,8 +55,8 @@ public class GeoCentroidIT extends AbstractGeoTestCase { private static final String aggName = "geoCentroid"; - public GeoCentroidIT(Settings dynamicSettings) { - super(dynamicSettings); + public GeoCentroidIT(Settings staticSettings) { + super(staticSettings); } public void testEmptyAggregation() throws Exception { diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 7ca5130388eea..ae67f0b1c0b66 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -72,8 +72,8 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { - public HDRPercentileRanksIT(Settings dynamicSettings) { - super(dynamicSettings); + public HDRPercentileRanksIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java index ec913b3e130f5..ff1cab85c18e6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -75,8 +75,8 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { - public HDRPercentilesIT(Settings dynamicSettings) { - super(dynamicSettings); + public HDRPercentilesIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index b8447d682abae..0edba475a6401 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -91,8 +91,8 @@ public class MedianAbsoluteDeviationIT extends AbstractNumericTestCase { private static double singleValueExactMAD; private static double multiValueExactMAD; - public MedianAbsoluteDeviationIT(Settings dynamicSettings) { - super(dynamicSettings); + public MedianAbsoluteDeviationIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java index ced2358ac3f78..a097268cd1d67 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -56,7 +56,7 @@ import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.OpenSearchIntegTestCase.Scope; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.Before; import java.io.IOException; @@ -93,12 +93,12 @@ @ClusterScope(scope = Scope.SUITE) @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ScriptedMetricIT extends ParameterizedOpenSearchIntegTestCase { +public class ScriptedMetricIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static long numDocs; - public ScriptedMetricIT(Settings dynamicSettings) { - super(dynamicSettings); + public ScriptedMetricIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java index f957a74eeb9d0..3708e1e6ab21b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/StatsIT.java @@ -66,8 +66,8 @@ import static org.hamcrest.Matchers.sameInstance; public class StatsIT extends AbstractNumericTestCase { - public StatsIT(Settings dynamicSettings) { - super(dynamicSettings); + public StatsIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java index 382d656448114..b2aa3438b2306 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/SumIT.java @@ -68,8 +68,8 @@ public class SumIT extends AbstractNumericTestCase { - public SumIT(Settings dynamicSettings) { - super(dynamicSettings); + public SumIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 941d3a888db29..4225c027c4d96 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -72,8 +72,8 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { - public TDigestPercentileRanksIT(Settings dynamicSettings) { - super(dynamicSettings); + public TDigestPercentileRanksIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java index 6457cf9307fa1..974e90fab16e8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -74,8 +74,8 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { - public TDigestPercentilesIT(Settings dynamicSettings) { - super(dynamicSettings); + public TDigestPercentilesIT(Settings staticSettings) { + super(staticSettings); } @Override diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java index 10e51079cf389..59a0aea4dae34 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java @@ -70,7 +70,7 @@ import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -111,13 +111,13 @@ import static org.hamcrest.Matchers.sameInstance; @OpenSearchIntegTestCase.SuiteScopeTestCase() -public class TopHitsIT extends ParameterizedOpenSearchIntegTestCase { +public class TopHitsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String TERMS_AGGS_FIELD = "terms"; private static final String SORT_FIELD = "sort"; - public TopHitsIT(Settings dynamicSettings) { - super(dynamicSettings); + public TopHitsIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java index 833d1ce3bb4c3..4c23f2e7666fa 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java @@ -45,7 +45,7 @@ import org.opensearch.search.aggregations.bucket.global.Global; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -73,10 +73,10 @@ import static org.hamcrest.Matchers.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ValueCountIT extends ParameterizedOpenSearchIntegTestCase { +public class ValueCountIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public ValueCountIT(Settings dynamicSettings) { - super(dynamicSettings); + public ValueCountIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java index bec9203384026..77008afdf1a0a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java @@ -46,7 +46,7 @@ import org.opensearch.search.aggregations.metrics.Sum; 
import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -66,7 +66,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class AvgBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class AvgBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; @@ -77,8 +77,8 @@ public class AvgBucketIT extends ParameterizedOpenSearchIntegTestCase { static int numValueBuckets; static long[] valueCounts; - public AvgBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public AvgBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java index 4c3129eb89e3b..5262a35d9d135 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java @@ -51,7 +51,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -76,7 +76,7 @@ import static org.hamcrest.Matchers.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class BucketScriptIT extends ParameterizedOpenSearchIntegTestCase { +public class BucketScriptIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String FIELD_1_NAME = "field1"; private static final String FIELD_2_NAME = "field2"; @@ -90,8 +90,8 @@ public class BucketScriptIT extends ParameterizedOpenSearchIntegTestCase { private static int maxNumber; private static long date; - public BucketScriptIT(Settings dynamicSettings) { - super(dynamicSettings); + public BucketScriptIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java index a7b28add7373a..f221349d7ec82 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java @@ -50,7 +50,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.util.ArrayList; @@ -77,7 +77,7 @@ import static org.hamcrest.Matchers.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase 
-public class BucketSelectorIT extends ParameterizedOpenSearchIntegTestCase { +public class BucketSelectorIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String FIELD_1_NAME = "field1"; private static final String FIELD_2_NAME = "field2"; @@ -89,8 +89,8 @@ public class BucketSelectorIT extends ParameterizedOpenSearchIntegTestCase { private static int minNumber; private static int maxNumber; - public BucketSelectorIT(Settings dynamicSettings) { - super(dynamicSettings); + public BucketSelectorIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java index 2e4fd7a412118..f1f29bfb9541d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java @@ -48,7 +48,7 @@ import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.io.IOException; import java.time.ZonedDateTime; @@ -75,7 +75,7 @@ import static org.hamcrest.Matchers.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class BucketSortIT extends ParameterizedOpenSearchIntegTestCase { +public class BucketSortIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String INDEX = "bucket-sort-it-data-index"; private static final String INDEX_WITH_GAPS = "bucket-sort-it-data-index-with-gaps"; @@ -85,8 +85,8 @@ public class BucketSortIT extends ParameterizedOpenSearchIntegTestCase { private static final String VALUE_1_FIELD = "value_1"; private static final String VALUE_2_FIELD = "value_2"; - public BucketSortIT(Settings dynamicSettings) { - super(dynamicSettings); + public BucketSortIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java index b05ff7b4329cd..e77245e6391a1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java @@ -48,7 +48,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.support.AggregationPath; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matcher; import org.junit.After; @@ -76,15 +76,15 @@ import static org.hamcrest.core.IsNull.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class DateDerivativeIT extends ParameterizedOpenSearchIntegTestCase { +public class DateDerivativeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { // some index names used during these tests private static final String IDX_DST_START = "idx_dst_start"; private static final String IDX_DST_END = "idx_dst_end"; 
private static final String IDX_DST_KATHMANDU = "idx_dst_kathmandu"; - public DateDerivativeIT(Settings dynamicSettings) { - super(dynamicSettings); + public DateDerivativeIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java index 41bbffc13658b..14b56787e6e7c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java @@ -51,7 +51,7 @@ import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.search.aggregations.support.AggregationPath; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.io.IOException; @@ -78,7 +78,7 @@ import static org.hamcrest.core.IsNull.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class DerivativeIT extends ParameterizedOpenSearchIntegTestCase { +public class DerivativeIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java index 299827e2413d4..b1a59525ab900 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java @@ -49,7 +49,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -69,7 +69,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class ExtendedStatsBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class ExtendedStatsBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; @@ -80,8 +80,8 @@ public class ExtendedStatsBucketIT extends ParameterizedOpenSearchIntegTestCase static int numValueBuckets; static long[] valueCounts; - public ExtendedStatsBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public ExtendedStatsBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java index dc3b690c7f78f..eb9397cfbbe98 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java @@ -58,7 +58,7 @@ import 
org.opensearch.search.aggregations.metrics.SumAggregationBuilder; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -80,7 +80,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class MaxBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class MaxBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; @@ -91,8 +91,8 @@ public class MaxBucketIT extends ParameterizedOpenSearchIntegTestCase { static int numValueBuckets; static long[] valueCounts; - public MaxBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public MaxBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java index 189bfd9b5b80a..0a00d59ea76b4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java @@ -46,7 +46,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -66,7 +66,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class MinBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class MinBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; @@ -77,8 +77,8 @@ public class MinBucketIT extends ParameterizedOpenSearchIntegTestCase { static int numValueBuckets; static long[] valueCounts; - public MinBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public MinBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java index 8ad3107ac33ac..a4f8e16e13fc3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java @@ -50,7 +50,7 @@ import org.opensearch.search.aggregations.metrics.Avg; import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -77,7 +77,7 @@ import static org.hamcrest.core.IsNull.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class MovAvgIT 
extends ParameterizedOpenSearchIntegTestCase { +public class MovAvgIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String INTERVAL_FIELD = "l_value"; private static final String VALUE_FIELD = "v_value"; private static final String VALUE_FIELD2 = "v_value2"; @@ -133,8 +133,8 @@ public String toString() { } } - public MovAvgIT(Settings dynamicSettings) { - super(dynamicSettings); + public MovAvgIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java index 580497715ed6d..079b7a1d12252 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -47,7 +47,7 @@ import org.opensearch.search.aggregations.metrics.Percentile; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -69,7 +69,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class PercentilesBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class PercentilesBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; private static final double[] PERCENTS = { 0.0, 1.0, 25.0, 50.0, 75.0, 99.0, 100.0 }; @@ -80,8 +80,8 @@ public class PercentilesBucketIT extends ParameterizedOpenSearchIntegTestCase { static int numValueBuckets; static long[] valueCounts; - public PercentilesBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public PercentilesBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java index b4da63802bc50..765709940611b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java @@ -43,7 +43,7 @@ import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.hamcrest.Matchers; import java.util.ArrayList; @@ -69,7 +69,7 @@ import static org.hamcrest.core.IsNull.nullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class SerialDiffIT extends ParameterizedOpenSearchIntegTestCase { +public class SerialDiffIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String INTERVAL_FIELD = "l_value"; private static final String VALUE_FIELD = "v_value"; @@ -98,8 +98,8 @@ public String toString() { } } - public SerialDiffIT(Settings dynamicSettings) { - 
super(dynamicSettings); + public SerialDiffIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java index 21fdd5e761e77..d4c9b654ad4e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java @@ -46,7 +46,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -66,7 +66,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class StatsBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class StatsBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; static int numDocs; @@ -76,8 +76,8 @@ public class StatsBucketIT extends ParameterizedOpenSearchIntegTestCase { static int numValueBuckets; static long[] valueCounts; - public StatsBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public StatsBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java index d4bd8f21b2a99..9f8fa83f3d4fe 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java @@ -46,7 +46,7 @@ import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -66,7 +66,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class SumBucketIT extends ParameterizedOpenSearchIntegTestCase { +public class SumBucketIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; @@ -77,8 +77,8 @@ public class SumBucketIT extends ParameterizedOpenSearchIntegTestCase { static int numValueBuckets; static long[] valueCounts; - public SumBucketIT(Settings dynamicSettings) { - super(dynamicSettings); + public SumBucketIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java index 28ada82a1c56b..74f39d6c626e3 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java @@ -37,7 +37,7 @@ import org.opensearch.tasks.CancellableTask; import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; import org.hamcrest.MatcherAssert; @@ -61,13 +61,13 @@ import static org.hamcrest.Matchers.instanceOf; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE) -public class SearchBackpressureIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchBackpressureIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { private static final TimeValue TIMEOUT = new TimeValue(10, TimeUnit.SECONDS); private static final int MOVING_AVERAGE_WINDOW_SIZE = 10; - public SearchBackpressureIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchBackpressureIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java index bd623ccdf2731..1ef9930ede86d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java @@ -46,7 +46,7 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.search.SearchService; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.junit.After; import java.util.Arrays; @@ -61,10 +61,10 @@ import static org.hamcrest.Matchers.lessThan; @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2) -public class SearchRedStateIndexIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchRedStateIndexIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchRedStateIndexIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchRedStateIndexIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java index a5989b693d332..51a40e51f8964 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java @@ -41,7 +41,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.Arrays; import java.util.Collection; @@ -54,10 +54,10 @@ * This test basically verifies that search with a single shard active (cause we indexed to it) and other * shards possibly not active at all (cause they haven't allocated) will still work. 
*/ -public class SearchWhileCreatingIndexIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchWhileCreatingIndexIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchWhileCreatingIndexIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchWhileCreatingIndexIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java index 6d2ec845afa98..2564fb893978b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java @@ -43,7 +43,7 @@ import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.SearchHits; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -60,10 +60,10 @@ import static org.hamcrest.Matchers.equalTo; @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2) -public class SearchWhileRelocatingIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchWhileRelocatingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchWhileRelocatingIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchWhileRelocatingIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java index aa82b9d21c7fb..3ad29dfc8dfef 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java @@ -56,7 +56,7 @@ import org.opensearch.plugins.Plugin; import org.opensearch.search.sort.SortOrder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import org.opensearch.test.engine.MockEngineSupport; import org.opensearch.test.engine.ThrowingLeafReaderWrapper; @@ -71,10 +71,10 @@ import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; -public class SearchWithRandomExceptionsIT extends ParameterizedOpenSearchIntegTestCase { +public class SearchWithRandomExceptionsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { - public SearchWithRandomExceptionsIT(Settings dynamicSettings) { - super(dynamicSettings); + public SearchWithRandomExceptionsIT(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java index 446a0bce58d66..9b03f4384c8f6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
@@ -51,7 +51,7 @@
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.store.MockFSDirectoryFactory;
 import org.opensearch.test.store.MockFSIndexStore;
@@ -64,10 +64,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
-public class SearchWithRandomIOExceptionsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchWithRandomIOExceptionsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public SearchWithRandomIOExceptionsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchWithRandomIOExceptionsIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
index cbe52abf5279b..325fa170e0761 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
@@ -48,7 +48,7 @@
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.MatchQueryBuilder;
 import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.Arrays;
@@ -64,10 +64,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
-public class TransportSearchFailuresIT extends ParameterizedOpenSearchIntegTestCase {
+public class TransportSearchFailuresIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public TransportSearchFailuresIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public TransportSearchFailuresIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
index edceb0cbc0d24..70f135690143f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
@@ -54,7 +54,7 @@
 import org.opensearch.search.aggregations.bucket.global.Global;
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.Arrays;
@@ -80,10 +80,10 @@
 import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.Matchers.startsWith;
-public class TransportTwoNodesSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class TransportTwoNodesSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public TransportTwoNodesSearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public TransportTwoNodesSearchIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
index 87f2153eb800f..4ae4ad81def4a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
@@ -54,7 +54,7 @@
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
 import org.opensearch.test.OpenSearchIntegTestCase.Scope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -74,10 +74,10 @@
 import static org.hamcrest.CoreMatchers.equalTo;
 @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 2)
-public class FetchSubPhasePluginIT extends ParameterizedOpenSearchIntegTestCase {
+public class FetchSubPhasePluginIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public FetchSubPhasePluginIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FetchSubPhasePluginIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
index 1a730c01e4890..266316cdff776 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
@@ -58,7 +58,7 @@
 import org.opensearch.search.sort.FieldSortBuilder;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -88,10 +88,10 @@
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
-public class InnerHitsIT extends ParameterizedOpenSearchIntegTestCase {
+public class InnerHitsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public InnerHitsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public InnerHitsIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
index 83cedb8c20e1d..d10d62e34bb50 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
@@ -45,7 +45,7 @@
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.index.query.TermQueryBuilder;
 import org.opensearch.search.SearchHit;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -64,10 +64,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasItemInArray;
-public class MatchedQueriesIT extends ParameterizedOpenSearchIntegTestCase {
+public class MatchedQueriesIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public MatchedQueriesIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MatchedQueriesIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
index fe17c3e22d43c..b10bb90538881 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
@@ -40,7 +40,7 @@
 import org.opensearch.plugins.Plugin;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
 import org.opensearch.test.OpenSearchIntegTestCase.Scope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;
 import java.io.IOException;
@@ -57,10 +57,10 @@
  * Integration test for highlighters registered by a plugin.
  */
 @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
-public class CustomHighlighterSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class CustomHighlighterSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public CustomHighlighterSearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public CustomHighlighterSearchIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 2afa911223074..3c81f8990128c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -75,7 +75,7 @@
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalSettingsPlugin;
 import org.opensearch.test.MockKeywordPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matcher;
 import org.hamcrest.Matchers;
@@ -128,13 +128,13 @@
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.startsWith;
-public class HighlighterSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class HighlighterSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     // TODO as we move analyzers out of the core we need to move some of these into HighlighterWithAnalyzersTests
     private static final String[] ALL_TYPES = new String[] { "plain", "fvh", "unified" };
-    public HighlighterSearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public HighlighterSearchIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
index f5d1b8234558e..99d5a1f8facce 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
@@ -44,7 +44,7 @@
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.MapperPlugin;
 import org.opensearch.plugins.Plugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;
 import java.util.ArrayList;
@@ -59,10 +59,10 @@
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
-public class FieldCapabilitiesIT extends ParameterizedOpenSearchIntegTestCase {
+public class FieldCapabilitiesIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public FieldCapabilitiesIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FieldCapabilitiesIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
index ed8fe74504f92..beb5679ae9b74 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
@@ -63,7 +63,7 @@
 import org.opensearch.search.lookup.FieldLookup;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.DateTimeFormat;
@@ -104,10 +104,10 @@
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
-public class SearchFieldsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchFieldsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public SearchFieldsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchFieldsIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
index 3a6624c2ad2e6..c13338c297ad4 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
@@ -56,7 +56,7 @@
 import org.opensearch.search.MultiValueMode;
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchHits;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.VersionUtils;
 import java.time.ZoneOffset;
@@ -91,10 +91,10 @@
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.lessThan;
-public class DecayFunctionScoreIT extends ParameterizedOpenSearchIntegTestCase {
+public class DecayFunctionScoreIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public DecayFunctionScoreIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public DecayFunctionScoreIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
index 62d0d89c644a5..2890b4afa7129 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
@@ -58,7 +58,7 @@
 import org.opensearch.search.lookup.SearchLookup;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
 import org.opensearch.test.OpenSearchIntegTestCase.Scope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.hamcrest.OpenSearchAssertions;
 import java.io.IOException;
@@ -83,10 +83,10 @@
 import static org.hamcrest.Matchers.equalTo;
 @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
-public class ExplainableScriptIT extends ParameterizedOpenSearchIntegTestCase {
+public class ExplainableScriptIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public ExplainableScriptIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public ExplainableScriptIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java
index d53f55b98bd23..7f7dfba541f6d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java
@@ -40,7 +40,7 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.SearchHit;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.Arrays;
@@ -61,10 +61,10 @@
 /**
  * Tests for the {@code field_value_factor} function in a function_score query.
  */
-public class FunctionScoreFieldValueIT extends ParameterizedOpenSearchIntegTestCase {
+public class FunctionScoreFieldValueIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public FunctionScoreFieldValueIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FunctionScoreFieldValueIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java
index 3b80d437e95c0..c846e40c9cf41 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -78,13 +78,13 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
-public class FunctionScoreIT extends ParameterizedOpenSearchIntegTestCase {
+public class FunctionScoreIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     static final String TYPE = "type";
     static final String INDEX = "index";
-    public FunctionScoreIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FunctionScoreIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java
index a91f53dae04d2..3ced0b457c44c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java
@@ -52,7 +52,7 @@
 import org.opensearch.search.SearchHits;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
 import org.opensearch.test.OpenSearchIntegTestCase.Scope;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.hamcrest.OpenSearchAssertions;
 import java.io.IOException;
@@ -71,10 +71,10 @@
 import static org.hamcrest.Matchers.equalTo;
 @ClusterScope(scope = Scope.SUITE, supportsDedicatedMasters = false, numDataNodes = 1)
-public class FunctionScorePluginIT extends ParameterizedOpenSearchIntegTestCase {
+public class FunctionScorePluginIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public FunctionScorePluginIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public FunctionScorePluginIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java
index bda6284d9535a..8286d089e3668 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java
@@ -55,7 +55,7 @@
 import org.opensearch.search.rescore.QueryRescoreMode;
 import org.opensearch.search.rescore.QueryRescorerBuilder;
 import org.opensearch.search.sort.SortBuilders;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -91,10 +91,10 @@
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.notNullValue;
-public class QueryRescorerIT extends ParameterizedOpenSearchIntegTestCase {
+public class QueryRescorerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public QueryRescorerIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public QueryRescorerIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java
index 69e30fc879dd8..0b18e46050e2d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java
@@ -47,7 +47,7 @@
 import org.opensearch.script.Script;
 import org.opensearch.script.ScriptType;
 import org.opensearch.search.SearchHit;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.CoreMatchers;
 import java.util.Arrays;
@@ -76,10 +76,10 @@
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 import static org.hamcrest.Matchers.nullValue;
-public class RandomScoreFunctionIT extends ParameterizedOpenSearchIntegTestCase {
+public class RandomScoreFunctionIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public RandomScoreFunctionIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public RandomScoreFunctionIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
index ba519be04edff..cfea8fc2af033 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
@@ -64,7 +64,7 @@
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.SearchHit;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.VersionUtils;
 import org.junit.BeforeClass;
@@ -99,10 +99,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
-public class GeoFilterIT extends ParameterizedOpenSearchIntegTestCase {
+public class GeoFilterIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public GeoFilterIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GeoFilterIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java
index 85cb087585d31..ccafddd84b605 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java
@@ -42,7 +42,7 @@
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.SearchHit;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.VersionUtils;
 import java.util.ArrayList;
@@ -60,10 +60,10 @@
 import static org.hamcrest.Matchers.equalTo;
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class GeoPolygonIT extends ParameterizedOpenSearchIntegTestCase {
+public class GeoPolygonIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public GeoPolygonIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GeoPolygonIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
index 1f9b6ae434f75..3a2c5113797f9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
@@ -48,7 +48,7 @@
 import org.opensearch.index.mapper.GeoShapeFieldMapper;
 import org.opensearch.index.mapper.MappedFieldType;
 import org.opensearch.indices.IndicesService;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -61,10 +61,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
-public class GeoShapeIntegrationIT extends ParameterizedOpenSearchIntegTestCase {
+public class GeoShapeIntegrationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public GeoShapeIntegrationIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GeoShapeIntegrationIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
index d21d6036c9673..7e7472f16a78b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.index.mapper.LegacyGeoShapeFieldMapper;
 import org.opensearch.index.mapper.MappedFieldType;
 import org.opensearch.indices.IndicesService;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.Arrays;
@@ -63,10 +63,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
-public class LegacyGeoShapeIntegrationIT extends ParameterizedOpenSearchIntegTestCase {
+public class LegacyGeoShapeIntegrationIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public LegacyGeoShapeIntegrationIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public LegacyGeoShapeIntegrationIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
index dc7c4e687c2fa..0ea47ebb9ea26 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -79,10 +79,10 @@
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.notNullValue;
-public class MoreLikeThisIT extends ParameterizedOpenSearchIntegTestCase {
+public class MoreLikeThisIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public MoreLikeThisIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MoreLikeThisIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java
index b35208941d2a2..0cf217ca2ee4d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java
@@ -40,7 +40,7 @@
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.index.query.QueryBuilders;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -52,10 +52,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId;
 import static org.hamcrest.Matchers.equalTo;
-public class MultiSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class MultiSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public MultiSearchIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MultiSearchIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java
index 8eeffcbecb377..3755e3d07cae4 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java
@@ -56,7 +56,7 @@
 import org.opensearch.search.sort.SortBuilders;
 import org.opensearch.search.sort.SortMode;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -77,10 +77,10 @@
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.startsWith;
-public class SimpleNestedIT extends ParameterizedOpenSearchIntegTestCase {
+public class SimpleNestedIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public SimpleNestedIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SimpleNestedIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java
index a3432bfe7e3e4..05e4e18916535 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java
@@ -37,7 +37,7 @@
 import org.opensearch.search.builder.PointInTimeBuilder;
 import org.opensearch.test.InternalTestCluster;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.threadpool.TestThreadPool;
 import org.opensearch.threadpool.ThreadPool;
 import org.junit.After;
@@ -67,7 +67,7 @@
  * Multi node integration tests for PIT creation and search operation with PIT ID.
  */
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, numDataNodes = 2)
-public class PitMultiNodeIT extends ParameterizedOpenSearchIntegTestCase {
+public class PitMultiNodeIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public PitMultiNodeIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java
index 6e40c08ed08a1..8e0fc319192ad 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.node.Node;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -70,10 +70,10 @@
 import static org.hamcrest.Matchers.not;
 @OpenSearchIntegTestCase.ClusterScope(minNumDataNodes = 2)
-public class SearchPreferenceIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchPreferenceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public SearchPreferenceIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchPreferenceIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java
index 82dd6225fda4e..7cd0623ba48fd 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.search.profile.query.CollectorResult;
 import org.opensearch.search.profile.query.QueryProfileShardResult;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.core.IsNull;
 import java.util.ArrayList;
@@ -83,7 +83,7 @@
 import static org.hamcrest.Matchers.sameInstance;
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public class AggregationProfilerIT extends ParameterizedOpenSearchIntegTestCase {
+public class AggregationProfilerIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final String BUILD_LEAF_COLLECTOR = AggregationTimingType.BUILD_LEAF_COLLECTOR.toString();
     private static final String COLLECT = AggregationTimingType.COLLECT.toString();
@@ -166,8 +166,8 @@ public class AggregationProfilerIT extends ParameterizedOpenSearchIntegTestCase
     private static final String REASON_SEARCH_TOP_HITS = "search_top_hits";
     private static final String REASON_AGGREGATION = "aggregation";
-    public AggregationProfilerIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public AggregationProfilerIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java
index ef73438114079..199a8d5f0896f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java
@@ -49,7 +49,7 @@
 import org.opensearch.search.profile.ProfileResult;
 import org.opensearch.search.profile.ProfileShardResult;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -68,7 +68,7 @@
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.nullValue;
-public class QueryProfilerIT extends ParameterizedOpenSearchIntegTestCase {
+public class QueryProfilerIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase {
     private final boolean concurrentSearchEnabled;
     private static final String MAX_PREFIX = "max_";
     private static final String MIN_PREFIX = "min_";
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java
index e3253ea583ac2..d3a7f9fbb6423 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java
@@ -44,7 +44,7 @@
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.SearchHit;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -62,10 +62,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
-public class ExistsIT extends ParameterizedOpenSearchIntegTestCase {
+public class ExistsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public ExistsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public ExistsIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java
index 457114bac33b8..e925e24e1d297 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java
@@ -54,7 +54,7 @@
 import org.opensearch.search.sort.SortBuilders;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.MockKeywordPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;
 import java.io.IOException;
@@ -92,10 +92,10 @@
 import static org.hamcrest.Matchers.greaterThanOrEqualTo;
 import static org.hamcrest.Matchers.lessThan;
-public class MultiMatchQueryIT extends ParameterizedOpenSearchIntegTestCase {
+public class MultiMatchQueryIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public MultiMatchQueryIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MultiMatchQueryIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java
index 1ca5859f23bca..e1d1e95f919e7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java
@@ -47,7 +47,7 @@
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchHits;
 import org.opensearch.search.SearchModule;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -70,12 +70,12 @@
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
-public class QueryStringIT extends ParameterizedOpenSearchIntegTestCase {
+public class QueryStringIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static int CLUSTER_MAX_CLAUSE_COUNT;
-    public QueryStringIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public QueryStringIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java
index 55029712a061c..bb09f96eee7f3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java
@@ -46,7 +46,7 @@
 import org.opensearch.script.MockScriptPlugin;
 import org.opensearch.script.Script;
 import org.opensearch.script.ScriptType;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -67,10 +67,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertThirdHit;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasScore;
-public class ScriptScoreQueryIT extends ParameterizedOpenSearchIntegTestCase {
+public class ScriptScoreQueryIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public ScriptScoreQueryIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public ScriptScoreQueryIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java
index 03312c6e1e2f7..574ca618bd4a3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java
@@ -80,7 +80,7 @@
 import org.opensearch.search.SearchHits;
 import org.opensearch.search.aggregations.AggregationBuilders;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.junit.annotations.TestIssueLogging;
 import java.io.IOException;
@@ -147,10 +147,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
-public class SearchQueryIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchQueryIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public SearchQueryIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchQueryIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java
index d8902238005da..aef58fc679946 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java
@@ -60,7 +60,7 @@
 import org.opensearch.search.SearchHits;
 import org.opensearch.search.SearchModule;
 import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.junit.BeforeClass;
 import java.io.IOException;
@@ -95,12 +95,12 @@
 /**
  * Tests for the {@code simple_query_string} query
 */
-public class SimpleQueryStringIT extends ParameterizedOpenSearchIntegTestCase {
+public class SimpleQueryStringIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static int CLUSTER_MAX_CLAUSE_COUNT;
-    public SimpleQueryStringIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SimpleQueryStringIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java
index ae00904f237a5..faf1e5f24c5b4 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java
@@ -50,7 +50,7 @@
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalSettingsPlugin;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.Arrays;
@@ -71,7 +71,7 @@
 import static org.hamcrest.Matchers.equalTo;
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE)
-public class ScriptQuerySearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class ScriptQuerySearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public ScriptQuerySearchIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java
index c7a6d18f881c6..e39469546dd0b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java
@@ -47,7 +47,7 @@
 import org.opensearch.search.sort.SortBuilder;
 import org.opensearch.search.sort.SortBuilders;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -60,7 +60,7 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures;
 import static org.hamcrest.Matchers.equalTo;
-public class DuelScrollIT extends ParameterizedOpenSearchIntegTestCase {
+public class DuelScrollIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public DuelScrollIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java
index b82048ffc924e..c4a14193cbdce 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java
@@ -60,7 +60,7 @@
 import org.opensearch.search.sort.FieldSortBuilder;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.InternalTestCluster;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.hamcrest.OpenSearchAssertions;
 import org.junit.After;
@@ -92,7 +92,7 @@
 /**
  * Tests for scrolling.
 */
-public class SearchScrollIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchScrollIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public SearchScrollIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java
index 27002b844da1d..382a3e4605b70 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java
@@ -41,7 +41,7 @@
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -58,7 +58,7 @@
 import static org.hamcrest.Matchers.lessThan;
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 2, numClientNodes = 0)
-public class SearchScrollWithFailingNodesIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchScrollWithFailingNodesIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public SearchScrollWithFailingNodesIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java
index b99f66850e9e3..5b848ec38eece 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java
@@ -52,7 +52,7 @@
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.builder.PointInTimeBuilder;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 import java.util.ArrayList;
@@ -69,7 +69,7 @@
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
-public class SearchAfterIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchAfterIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final String INDEX_NAME = "test";
     private static final int NUM_DOCS = 100;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java
index 7aae41d939cac..be666dbc304b5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java
@@ -52,7 +52,7 @@
 import org.opensearch.index.query.TermQueryBuilder;
 import org.opensearch.search.rescore.QueryRescorerBuilder;
 import org.opensearch.search.sort.SortOrder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -77,7 +77,7 @@
 import static org.apache.lucene.search.TotalHits.Relation.EQUAL_TO;
 import static org.apache.lucene.search.TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
-public class SimpleSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class SimpleSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public SimpleSearchIT(Settings settings) {
         super(settings);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java
index 27a56f9d14f08..1d14ab40882fb 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java
@@ -53,7 +53,7 @@
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.builder.PointInTimeBuilder;
 import org.opensearch.search.sort.SortBuilders;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -70,9 +70,9 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.startsWith;
-public class SearchSliceIT extends ParameterizedOpenSearchIntegTestCase {
-    public SearchSliceIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+public class SearchSliceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
+    public SearchSliceIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java
index 81e948640ee94..f77f5a682da25 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java
@@ -63,7 +63,7 @@
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.SearchHits;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedDynamicSettingsOpenSearchIntegTestCase;
 import org.hamcrest.Matchers;
 import java.io.IOException;
@@ -109,7 +109,7 @@
 import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.Matchers.oneOf;
-public class FieldSortIT extends ParameterizedOpenSearchIntegTestCase {
+public class FieldSortIT extends ParameterizedDynamicSettingsOpenSearchIntegTestCase {
     public FieldSortIT(Settings dynamicSettings) {
         super(dynamicSettings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java
index 766ac6139b24b..d68cbe2fe4458 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java
@@ -45,7 +45,7 @@
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.geometry.utils.Geohash;
 import org.opensearch.index.query.QueryBuilders;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.VersionUtils;
 import java.io.IOException;
@@ -66,10 +66,10 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
-public class GeoDistanceIT extends ParameterizedOpenSearchIntegTestCase {
+public class GeoDistanceIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public GeoDistanceIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public GeoDistanceIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java
index 1b8bd9694483d..29ecab82e7a83 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java
@@ -45,7 +45,7 @@
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.GeoValidationMethod;
 import org.opensearch.search.builder.SearchSourceBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.VersionUtils;
 import java.io.IOException;
@@ -65,7 +65,7 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSortValues;
 import static org.hamcrest.Matchers.closeTo;
-public class GeoDistanceSortBuilderIT extends ParameterizedOpenSearchIntegTestCase {
+public class GeoDistanceSortBuilderIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public GeoDistanceSortBuilderIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
index ddfbc3cce2be6..3d8d6ed291366 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
@@ -49,7 +49,7 @@
 import org.opensearch.search.SearchHit;
 import org.opensearch.search.sort.ScriptSortBuilder.ScriptSortType;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -75,12 +75,12 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.not;
-public class SimpleSortIT extends ParameterizedOpenSearchIntegTestCase {
+public class SimpleSortIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     private static final String DOUBLE_APOSTROPHE = "\u0027\u0027";
-    public SimpleSortIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SimpleSortIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java
index 76e68781c72ba..051d7bc888d83 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java
@@ -19,7 +19,7 @@
 import org.opensearch.search.sort.plugin.CustomSortBuilder;
 import org.opensearch.search.sort.plugin.CustomSortPlugin;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -27,7 +27,7 @@
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 import static org.hamcrest.Matchers.equalTo;
-public class SortFromPluginIT extends ParameterizedOpenSearchIntegTestCase {
+public class SortFromPluginIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public SortFromPluginIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java
index a9c4bf841d9a1..7ac2c2c5e4015 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java
@@ -45,7 +45,7 @@
 import org.opensearch.search.SearchException;
 import org.opensearch.search.SearchHits;
 import org.opensearch.search.fetch.subphase.FetchSourceContext;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -57,10 +57,10 @@
 import static org.hamcrest.Matchers.notNullValue;
 import static org.hamcrest.Matchers.nullValue;
-public class MetadataFetchingIT extends ParameterizedOpenSearchIntegTestCase {
+public class MetadataFetchingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public MetadataFetchingIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public MetadataFetchingIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java
index 805e82dc9850b..465bf154bbb9f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java
@@ -37,7 +37,7 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.FeatureFlags;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -47,10 +47,10 @@
 import static org.hamcrest.Matchers.nullValue;
 import static org.hamcrest.core.IsEqual.equalTo;
-public class SourceFetchingIT extends ParameterizedOpenSearchIntegTestCase {
+public class SourceFetchingIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public SourceFetchingIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SourceFetchingIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java
index 8fb3c57dd7680..06de4bd1f85e0 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java
@@ -54,7 +54,7 @@
 import org.opensearch.script.ScriptType;
 import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -81,10 +81,10 @@
 import static org.hamcrest.Matchers.nullValue;
 @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, minNumDataNodes = 2)
-public class SearchStatsIT extends ParameterizedOpenSearchIntegTestCase {
+public class SearchStatsIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public SearchStatsIT(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public SearchStatsIT(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
index b342e6d35f0b4..fa4ce2a0870ec 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
@@ -65,7 +65,7 @@
 import org.opensearch.search.suggest.completion.context.ContextMapping;
 import org.opensearch.search.suggest.completion.context.GeoContextMapping;
 import org.opensearch.test.InternalSettingsPlugin;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -100,7 +100,7 @@
 import static org.hamcrest.Matchers.notNullValue;
 @SuppressCodecs("*") // requires custom completion format
-public class CompletionSuggestSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class CompletionSuggestSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public CompletionSuggestSearchIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java
index bac3e7fb61683..0a922a657986c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java
@@ -53,7 +53,7 @@
 import org.opensearch.search.suggest.completion.context.ContextMapping;
 import org.opensearch.search.suggest.completion.context.GeoContextMapping;
 import org.opensearch.search.suggest.completion.context.GeoQueryContext;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -73,7 +73,7 @@
 import static org.hamcrest.core.IsEqual.equalTo;
 @SuppressCodecs("*") // requires custom completion format
-public class ContextCompletionSuggestSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class ContextCompletionSuggestSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public ContextCompletionSuggestSearchIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
index bc6e49008bf6b..a8c0f8a45fef8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
@@ -57,7 +57,7 @@
 import org.opensearch.search.suggest.phrase.StupidBackoff;
 import org.opensearch.search.suggest.term.TermSuggestionBuilder;
 import org.opensearch.search.suggest.term.TermSuggestionBuilder.SuggestMode;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.hamcrest.OpenSearchAssertions;
 import java.io.IOException;
@@ -96,7 +96,7 @@
 * possible these tests should declare for the first request, make the request, modify the configuration for the next request, make that
 * request, modify again, request again, etc. This makes it very obvious what changes between requests.
 */
-public class SuggestSearchIT extends ParameterizedOpenSearchIntegTestCase {
+public class SuggestSearchIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
    public SuggestSearchIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java
index 8c9bff9833462..c42fa55887966 100644
--- a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java
@@ -37,7 +37,7 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.FeatureFlags;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -49,7 +49,7 @@
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.not;
-public class SimilarityIT extends ParameterizedOpenSearchIntegTestCase {
+public class SimilarityIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public SimilarityIT(Settings settings) {
         super(settings);
     }
diff --git a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java
index 4b0bde0984ad1..00935ae0940a1 100644
--- a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java
+++ b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java
@@ -66,7 +66,7 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.io.IOException;
 import java.util.ArrayList;
@@ -82,10 +82,10 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.hamcrest.Matchers.equalTo;
-public abstract class AbstractTermVectorsTestCase extends ParameterizedOpenSearchIntegTestCase {
+public abstract class AbstractTermVectorsTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
-    public AbstractTermVectorsTestCase(Settings dynamicSettings) {
-        super(dynamicSettings);
+    public AbstractTermVectorsTestCase(Settings staticSettings) {
+        super(staticSettings);
     }
     @ParametersFactory
diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java
index 21d05305eed1b..9c3a825eb6d86 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java
@@ -38,7 +38,7 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.FeatureFlags;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -52,7 +52,7 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.is;
-public abstract class ShardSizeTestCase extends ParameterizedOpenSearchIntegTestCase {
+public abstract class ShardSizeTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     public ShardSizeTestCase(Settings dynamicSettings) {
         super(dynamicSettings);
diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java
index e02c00005df9b..b058d96aa7327 100644
--- a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java
+++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java
@@ -48,7 +48,7 @@
 import org.opensearch.search.sort.SortBuilders;
 import org.opensearch.search.sort.SortOrder;
 import org.opensearch.test.OpenSearchIntegTestCase;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import org.opensearch.test.geo.RandomGeoGenerator;
 import java.util.ArrayList;
@@ -65,7 +65,7 @@
 import static org.hamcrest.Matchers.equalTo;
 @OpenSearchIntegTestCase.SuiteScopeTestCase
-public abstract class AbstractGeoTestCase extends ParameterizedOpenSearchIntegTestCase {
+public abstract class AbstractGeoTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase {
     protected static final String SINGLE_VALUED_FIELD_NAME = "geo_value";
     protected static final String MULTI_VALUED_FIELD_NAME = "geo_values";
diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java
index 8e94f2cacf070..cdf4d0db6385f 100644
--- a/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java
@@ -40,7 +40,7 @@
 import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
-import org.opensearch.test.ParameterizedOpenSearchIntegTestCase;
+import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 import java.util.Arrays;
 import java.util.Collection;
@@ -49,7 +49,7 @@
 import static
org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -public abstract class AbstractTermsTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class AbstractTermsTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { public AbstractTermsTestCase(Settings dynamicSettings) { super(dynamicSettings); diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java index 103b67e2782de..d2957276700c0 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java @@ -37,7 +37,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; import java.util.Arrays; @@ -48,11 +48,11 @@ import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; @OpenSearchIntegTestCase.SuiteScopeTestCase -public abstract class AbstractNumericTestCase extends ParameterizedOpenSearchIntegTestCase { +public abstract class AbstractNumericTestCase extends ParameterizedStaticSettingsOpenSearchIntegTestCase { protected static long minValue, maxValue, minValues, maxValues; - public AbstractNumericTestCase(Settings dynamicSettings) { - super(dynamicSettings); + public AbstractNumericTestCase(Settings staticSettings) { + super(staticSettings); } @ParametersFactory diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 6215e84f42676..5e79396de8d8d 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -32,12 +32,10 @@ package org.opensearch.test; -import com.carrotsearch.randomizedtesting.RandomizedContext; import com.carrotsearch.randomizedtesting.annotations.TestGroup; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.hc.core5.http.HttpHost; import org.apache.lucene.codecs.Codec; import org.apache.lucene.search.Sort; import org.apache.lucene.search.TotalHits; @@ -48,7 +46,6 @@ import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.node.hotthreads.NodeHotThreads; -import org.opensearch.action.admin.cluster.node.info.NodeInfo; import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.action.admin.cluster.state.ClusterStateResponse; @@ -77,7 +74,6 @@ import org.opensearch.client.ClusterAdminClient; import org.opensearch.client.Requests; import org.opensearch.client.RestClient; -import org.opensearch.client.RestClientBuilder; import org.opensearch.cluster.ClusterModule; import org.opensearch.cluster.ClusterState; import 
org.opensearch.cluster.coordination.OpenSearchNodeCommand; @@ -96,7 +92,6 @@ import org.opensearch.common.Nullable; import org.opensearch.common.Priority; import org.opensearch.common.collect.Tuple; -import org.opensearch.common.network.NetworkAddress; import org.opensearch.common.network.NetworkModule; import org.opensearch.common.regex.Regex; import org.opensearch.common.settings.FeatureFlagSettings; @@ -106,7 +101,6 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.common.util.io.IOUtils; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.common.xcontent.XContentType; import org.opensearch.common.xcontent.smile.SmileXContent; @@ -128,7 +122,6 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.env.Environment; import org.opensearch.env.TestEnvironment; -import org.opensearch.http.HttpInfo; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexSettings; import org.opensearch.index.MergeSchedulerConfig; @@ -147,7 +140,6 @@ import org.opensearch.node.NodeMocksPlugin; import org.opensearch.plugins.NetworkPlugin; import org.opensearch.plugins.Plugin; -import org.opensearch.rest.action.RestCancellableNodeClient; import org.opensearch.script.MockScriptService; import org.opensearch.search.MockSearchService; import org.opensearch.search.SearchHit; @@ -158,17 +150,15 @@ import org.opensearch.test.disruption.ServiceDisruptionScheme; import org.opensearch.test.store.MockFSIndexStore; import org.opensearch.test.telemetry.MockTelemetryPlugin; -import org.opensearch.test.telemetry.tracing.StrictCheckSpanProcessor; import org.opensearch.test.transport.MockTransportService; import org.opensearch.transport.TransportInterceptor; import org.opensearch.transport.TransportRequest; import org.opensearch.transport.TransportRequestHandler; import org.opensearch.transport.TransportService; import org.hamcrest.Matchers; -import org.junit.After; import org.junit.AfterClass; -import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Rule; import java.io.IOException; import java.lang.Runtime.Version; @@ -189,13 +179,11 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.IdentityHashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Random; import java.util.Set; -import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -216,7 +204,6 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoTimeout; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.emptyIterable; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -334,6 +321,10 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase { public static final String TESTS_ENABLE_MOCK_MODULES = "tests.enable_mock_modules"; private static final boolean MOCK_MODULES_ENABLED = "true".equals(System.getProperty(TESTS_ENABLE_MOCK_MODULES, "true")); + + @Rule + public static OpenSearchTestClusterRule testClusterRule = new OpenSearchTestClusterRule(); + /** * Threshold at which indexing 
switches from frequently async to frequently bulk. */ @@ -369,22 +360,9 @@ public abstract class OpenSearchIntegTestCase extends OpenSearchTestCase { */ public static final String TESTS_CLUSTER_NAME = "tests.clustername"; - /** - * The current cluster depending on the configured {@link Scope}. - * By default if no {@link ClusterScope} is configured this will hold a reference to the suite cluster. - */ - private static TestCluster currentCluster; - private static RestClient restClient = null; - - private static final Map<Class<?>, TestCluster> clusters = new IdentityHashMap<>(); - - private static OpenSearchIntegTestCase INSTANCE = null; // see @SuiteScope - private static Long SUITE_SEED = null; - @BeforeClass public static void beforeClass() throws Exception { - SUITE_SEED = randomLong(); - initializeSuiteScope(); + testClusterRule.beforeClass(); } @Override @@ -394,36 +372,6 @@ protected final boolean enableWarningsCheck() { return false; } - protected final void beforeInternal() throws Exception { - final Scope currentClusterScope = getCurrentClusterScope(); - Callable<Void> setup = () -> { - cluster().beforeTest(random()); - cluster().wipe(excludeTemplates()); - randomIndexTemplate(); - return null; - }; - switch (currentClusterScope) { - case SUITE: - assert SUITE_SEED != null : "Suite seed was not initialized"; - currentCluster = buildAndPutCluster(currentClusterScope, SUITE_SEED); - RandomizedContext.current().runWithPrivateRandomness(SUITE_SEED, setup); - break; - case TEST: - currentCluster = buildAndPutCluster(currentClusterScope, randomLong()); - setup.call(); - break; - } - - } - - private void printTestMessage(String message) { - if (isSuiteScopedTest(getClass()) && (getTestName().equals("<unknown>"))) { - logger.info("[{}]: {} suite", getTestClass().getSimpleName(), message); - } else { - logger.info("[{}#{}]: {} test", getTestClass().getSimpleName(), getTestName(), message); - } - } - /** * Creates a randomized index template. This template is used to pass in randomized settings on a * per index basis. Allows to enable/disable the randomization for number of shards and replicas @@ -547,85 +495,6 @@ private static Settings.Builder setRandomIndexTranslogSettings(Random random, Se return builder; } - private TestCluster buildWithPrivateContext(final Scope scope, final long seed) throws Exception { - return RandomizedContext.current().runWithPrivateRandomness(seed, () -> buildTestCluster(scope, seed)); - } - - private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed) throws Exception { - final Class<?> clazz = this.getClass(); - TestCluster testCluster = clusters.remove(clazz); // remove this cluster first - clearClusters(); // all leftovers are gone by now... 
this is really just a double safety if we miss something somewhere - switch (currentClusterScope) { - case SUITE: - if (testCluster == null) { // only build if it's not there yet - testCluster = buildWithPrivateContext(currentClusterScope, seed); - } - break; - case TEST: - // close the previous one and create a new one - IOUtils.closeWhileHandlingException(testCluster); - testCluster = buildTestCluster(currentClusterScope, seed); - break; - } - clusters.put(clazz, testCluster); - return testCluster; - } - - private static void clearClusters() throws Exception { - if (!clusters.isEmpty()) { - IOUtils.close(clusters.values()); - clusters.clear(); - } - if (restClient != null) { - restClient.close(); - restClient = null; - } - assertBusy(() -> { - int numChannels = RestCancellableNodeClient.getNumChannels(); - assertEquals( - numChannels - + " channels still being tracked in " - + RestCancellableNodeClient.class.getSimpleName() - + " while there should be none", - 0, - numChannels - ); - }); - } - - private void afterInternal(boolean afterClass) throws Exception { - final Scope currentClusterScope = getCurrentClusterScope(); - if (isInternalCluster()) { - internalCluster().clearDisruptionScheme(); - } - try { - if (cluster() != null) { - if (currentClusterScope != Scope.TEST) { - Metadata metadata = client().admin().cluster().prepareState().execute().actionGet().getState().getMetadata(); - - final Set<String> persistentKeys = new HashSet<>(metadata.persistentSettings().keySet()); - assertThat("test leaves persistent cluster metadata behind", persistentKeys, empty()); - - final Set<String> transientKeys = new HashSet<>(metadata.transientSettings().keySet()); - assertThat("test leaves transient cluster metadata behind", transientKeys, empty()); - } - ensureClusterSizeConsistency(); - ensureClusterStateConsistency(); - ensureClusterStateCanBeReadByNodeTool(); - beforeIndexDeletion(); - cluster().wipe(excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete - if (afterClass || currentClusterScope == Scope.TEST) { - cluster().close(); - } - cluster().assertAfterTest(); - } - } finally { - if (currentClusterScope == Scope.TEST) { - clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST - } - } - } - /** * @return An exclude set of index templates that will not be removed in between tests. 
*/ @@ -638,18 +507,15 @@ protected void beforeIndexDeletion() throws Exception { } public static TestCluster cluster() { - return currentCluster; + return testClusterRule.cluster(); } public static boolean isInternalCluster() { - return (currentCluster instanceof InternalTestCluster); + return testClusterRule.isInternalCluster(); } public static InternalTestCluster internalCluster() { - if (!isInternalCluster()) { - throw new UnsupportedOperationException("current test cluster is immutable"); - } - return (InternalTestCluster) currentCluster; + return testClusterRule.internalCluster().orElseThrow(() -> new UnsupportedOperationException("current test cluster is immutable")); } public ClusterService clusterService() { @@ -661,14 +527,7 @@ public static Client client() { } public static Client client(@Nullable String node) { - if (node != null) { - return internalCluster().client(node); - } - Client client = cluster().client(); - if (frequently()) { - client = new RandomizingClient(client, random()); - } - return client; + return testClusterRule.clientForNode(node); } public static Client dataNodeClient() { @@ -1928,7 +1787,7 @@ public void clearScroll(String... scrollIds) { assertThat(clearResponse.isSucceeded(), equalTo(true)); } - private static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> annotationClass) { + static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> annotationClass) { if (clazz == Object.class || clazz == OpenSearchIntegTestCase.class) { return null; } @@ -1939,16 +1798,6 @@ private static <A extends Annotation> A getAnnotation(Class<?> clazz, Class<A> a return getAnnotation(clazz.getSuperclass(), annotationClass); } - private Scope getCurrentClusterScope() { - return getCurrentClusterScope(this.getClass()); - } - - private static Scope getCurrentClusterScope(Class<?> clazz) { - ClusterScope annotation = getAnnotation(clazz, ClusterScope.class); - // if we are not annotated assume suite! - return annotation == null ? Scope.SUITE : annotation.scope(); - } - private boolean getSupportsDedicatedClusterManagers() { ClusterScope annotation = getAnnotation(this.getClass(), ClusterScope.class); return annotation == null ? 
true : annotation.supportsDedicatedMasters(); @@ -2270,10 +2119,9 @@ public <T extends TransportRequest> TransportRequestHandler<T> interceptHandler( * Returns path to a random directory that can be used to create a temporary file system repo */ public Path randomRepoPath() { - if (currentCluster instanceof InternalTestCluster) { - return randomRepoPath(((InternalTestCluster) currentCluster).getDefaultSettings()); - } - throw new UnsupportedOperationException("unsupported cluster type"); + return testClusterRule.internalCluster() + .map(c -> randomRepoPath(c.getDefaultSettings())) + .orElseThrow(() -> new UnsupportedOperationException("unsupported cluster type")); } /** @@ -2347,83 +2195,9 @@ private NumShards(int numPrimaries, int numReplicas) { } } - private static boolean runTestScopeLifecycle() { - return INSTANCE == null; - } - - @Before - public final void setupTestCluster() throws Exception { - if (runTestScopeLifecycle()) { - printTestMessage("setting up"); - beforeInternal(); - printTestMessage("all set up"); - } - } - - @After - public final void cleanUpCluster() throws Exception { - // Deleting indices is going to clear search contexts implicitly so we - // need to check that there are no more in-flight search contexts before - // we remove indices - if (isInternalCluster()) { - internalCluster().setBootstrapClusterManagerNodeIndex(-1); - } - super.ensureAllSearchContextsReleased(); - if (runTestScopeLifecycle()) { - printTestMessage("cleaning up after"); - afterInternal(false); - printTestMessage("cleaned up after"); - } - } - @AfterClass public static void afterClass() throws Exception { - try { - if (runTestScopeLifecycle()) { - clearClusters(); - } else { - INSTANCE.printTestMessage("cleaning up after"); - INSTANCE.afterInternal(true); - checkStaticState(true); - } - StrictCheckSpanProcessor.validateTracingStateOnShutdown(); - } finally { - SUITE_SEED = null; - currentCluster = null; - INSTANCE = null; - } - } - - private static void initializeSuiteScope() throws Exception { - Class<?> targetClass = getTestClass(); - /* - Note we create these test class instance via reflection - since JUnit creates a new instance per test and that is also - the reason why INSTANCE is static since this entire method - must be executed in a static context. - */ - assert INSTANCE == null; - if (isSuiteScopedTest(targetClass)) { - // note we need to do this way to make sure this is reproducible - if (isSuiteScopedTestParameterized(targetClass)) { - INSTANCE = (OpenSearchIntegTestCase) targetClass.getConstructor(Settings.class).newInstance(Settings.EMPTY); - } else { - INSTANCE = (OpenSearchIntegTestCase) targetClass.getConstructor().newInstance(); - } - boolean success = false; - try { - INSTANCE.printTestMessage("setup"); - INSTANCE.beforeInternal(); - INSTANCE.setupSuiteScopeCluster(); - success = true; - } finally { - if (!success) { - afterClass(); - } - } - } else { - INSTANCE = null; - } + testClusterRule.afterClass(); } /** @@ -2455,41 +2229,8 @@ protected boolean forbidPrivateIndexSettings() { * The returned client gets automatically closed when needed, it shouldn't be closed as part of tests otherwise * it cannot be reused by other tests anymore. 
*/ - protected static synchronized RestClient getRestClient() { - if (restClient == null) { - restClient = createRestClient(); - } - return restClient; - } - - protected static RestClient createRestClient() { - return createRestClient(null, "http"); - } - - protected static RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) { - NodesInfoResponse nodesInfoResponse = client().admin().cluster().prepareNodesInfo().get(); - assertFalse(nodesInfoResponse.hasFailures()); - return createRestClient(nodesInfoResponse.getNodes(), httpClientConfigCallback, protocol); - } - - protected static RestClient createRestClient( - final List<NodeInfo> nodes, - RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, - String protocol - ) { - List<HttpHost> hosts = new ArrayList<>(); - for (NodeInfo node : nodes) { - if (node.getInfo(HttpInfo.class) != null) { - TransportAddress publishAddress = node.getInfo(HttpInfo.class).address().publishAddress(); - InetSocketAddress address = publishAddress.address(); - hosts.add(new HttpHost(protocol, NetworkAddress.format(address.getAddress()), address.getPort())); - } - } - RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[0])); - if (httpClientConfigCallback != null) { - builder.setHttpClientConfigCallback(httpClientConfigCallback); - } - return builder.build(); + protected static RestClient getRestClient() { + return testClusterRule.getRestClient(); } /** @@ -2500,20 +2241,6 @@ protected static RestClient createRestClient( */ protected void setupSuiteScopeCluster() throws Exception {} - private static boolean isSuiteScopedTest(Class<?> clazz) { - return clazz.getAnnotation(SuiteScopeTestCase.class) != null; - } - - /* - * For tests defined with, SuiteScopeTestCase return true if the - * class has a constructor that takes a single Settings parameter - * */ - private static boolean isSuiteScopedTestParameterized(Class<?> clazz) { - return Arrays.stream(clazz.getConstructors()) - .filter(x -> x.getParameterTypes().length == 1) - .anyMatch(x -> x.getParameterTypes()[0].equals(Settings.class)); - } - /** * If a test is annotated with {@link SuiteScopeTestCase} * the checks and modifications that are applied to the used test cluster are only done after all tests diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java new file mode 100644 index 0000000000000..dcdca8ab35dfc --- /dev/null +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java @@ -0,0 +1,397 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.test;
+
+import com.carrotsearch.randomizedtesting.RandomizedContext;
+
+import org.apache.hc.core5.http.HttpHost;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.opensearch.action.admin.cluster.node.info.NodeInfo;
+import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse;
+import org.opensearch.client.Client;
+import org.opensearch.client.RestClient;
+import org.opensearch.client.RestClientBuilder;
+import org.opensearch.cluster.metadata.Metadata;
+import org.opensearch.common.Nullable;
+import org.opensearch.common.network.NetworkAddress;
+import org.opensearch.common.util.io.IOUtils;
+import org.opensearch.core.common.transport.TransportAddress;
+import org.opensearch.http.HttpInfo;
+import org.opensearch.rest.action.RestCancellableNodeClient;
+import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
+import org.opensearch.test.OpenSearchIntegTestCase.Scope;
+import org.opensearch.test.OpenSearchIntegTestCase.SuiteScopeTestCase;
+import org.opensearch.test.client.RandomizingClient;
+import org.opensearch.test.telemetry.tracing.StrictCheckSpanProcessor;
+import org.junit.rules.MethodRule;
+import org.junit.runners.model.FrameworkMethod;
+import org.junit.runners.model.MultipleFailureException;
+import org.junit.runners.model.Statement;
+
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.Callable;
+
+import static org.hamcrest.Matchers.empty;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+
+/**
+ * The JUnit {@link MethodRule} that handles test method scoped and test suite scoped clusters for integration (internal cluster) tests. The rule is
+ * injected into {@link OpenSearchIntegTestCase}, which every integration test suite should subclass. In the case of parameterized test suites,
+ * please subclass {@link ParameterizedStaticSettingsOpenSearchIntegTestCase} or {@link ParameterizedDynamicSettingsOpenSearchIntegTestCase}, depending
+ * on the way cluster settings are being managed.
+ */
+class OpenSearchTestClusterRule implements MethodRule {
+    private final Map<Class<?>, TestCluster> clusters = new IdentityHashMap<>();
+    private final Logger logger = LogManager.getLogger(getClass());
+
+    /**
+     * The current cluster depending on the configured {@link Scope}.
+     * By default, if no {@link ClusterScope} is configured, this will hold a reference to the suite cluster.
+ */ + private TestCluster currentCluster = null; + private RestClient restClient = null; + + private OpenSearchIntegTestCase suiteInstance = null; // see @SuiteScope + private Long suiteSeed = null; + + @Override + public Statement apply(Statement base, FrameworkMethod method, Object target) { + return statement(base, method, target); + } + + void beforeClass() throws Exception { + suiteSeed = OpenSearchTestCase.randomLong(); + } + + void afterClass() throws Exception { + try { + if (runTestScopeLifecycle()) { + clearClusters(); + } else { + printTestMessage("cleaning up after"); + afterInternal(true, null); + OpenSearchTestCase.checkStaticState(true); + clusters.remove(getTestClass()); + } + StrictCheckSpanProcessor.validateTracingStateOnShutdown(); + } finally { + suiteSeed = null; + currentCluster = null; + suiteInstance = null; + } + } + + TestCluster cluster() { + return currentCluster; + } + + boolean isInternalCluster() { + return (cluster() instanceof InternalTestCluster); + } + + Optional<InternalTestCluster> internalCluster() { + if (!isInternalCluster()) { + return Optional.empty(); + } else { + return Optional.of((InternalTestCluster) cluster()); + } + } + + Client clientForAnyNode() { + return clientForNode(null); + } + + Client clientForNode(@Nullable String node) { + if (node != null) { + return internalCluster().orElseThrow(() -> new UnsupportedOperationException("current test cluster is immutable")).client(node); + } + Client client = cluster().client(); + if (OpenSearchTestCase.frequently()) { + client = new RandomizingClient(client, OpenSearchTestCase.random()); + } + return client; + } + + synchronized RestClient getRestClient() { + if (restClient == null) { + restClient = createRestClient(); + } + return restClient; + } + + protected final void beforeInternal(OpenSearchIntegTestCase target) throws Exception { + final Scope currentClusterScope = getClusterScope(target.getClass()); + Callable<Void> setup = () -> { + currentCluster.beforeTest(OpenSearchTestCase.random()); + currentCluster.wipe(target.excludeTemplates()); + target.randomIndexTemplate(); + return null; + }; + switch (currentClusterScope) { + case SUITE: + assert suiteSeed != null : "Suite seed was not initialized"; + currentCluster = buildAndPutCluster(currentClusterScope, suiteSeed, target); + RandomizedContext.current().runWithPrivateRandomness(suiteSeed, setup); + break; + case TEST: + currentCluster = buildAndPutCluster(currentClusterScope, OpenSearchTestCase.randomLong(), target); + setup.call(); + break; + } + } + + protected void before(Object target, FrameworkMethod method) throws Throwable { + final OpenSearchIntegTestCase instance = (OpenSearchIntegTestCase) target; + initializeSuiteScope(instance, method); + + if (runTestScopeLifecycle()) { + printTestMessage("setting up", method); + beforeInternal(instance); + printTestMessage("all set up", method); + } + } + + protected void after(Object target, FrameworkMethod method) throws Exception { + final OpenSearchIntegTestCase instance = (OpenSearchIntegTestCase) target; + + // Deleting indices is going to clear search contexts implicitly so we + // need to check that there are no more in-flight search contexts before + // we remove indices + internalCluster().ifPresent(c -> c.setBootstrapClusterManagerNodeIndex(-1)); + + instance.ensureAllSearchContextsReleased(); + if (runTestScopeLifecycle()) { + printTestMessage("cleaning up after", method); + afterInternal(false, instance); + printTestMessage("cleaned up after", method); + } + } + + protected 
RestClient createRestClient() { + return createRestClient(null, "http"); + } + + protected RestClient createRestClient(RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, String protocol) { + NodesInfoResponse nodesInfoResponse = clientForAnyNode().admin().cluster().prepareNodesInfo().get(); + assertFalse(nodesInfoResponse.hasFailures()); + return createRestClient(nodesInfoResponse.getNodes(), httpClientConfigCallback, protocol); + } + + protected RestClient createRestClient( + final List<NodeInfo> nodes, + RestClientBuilder.HttpClientConfigCallback httpClientConfigCallback, + String protocol + ) { + List<HttpHost> hosts = new ArrayList<>(); + for (NodeInfo node : nodes) { + if (node.getInfo(HttpInfo.class) != null) { + TransportAddress publishAddress = node.getInfo(HttpInfo.class).address().publishAddress(); + InetSocketAddress address = publishAddress.address(); + hosts.add(new HttpHost(protocol, NetworkAddress.format(address.getAddress()), address.getPort())); + } + } + RestClientBuilder builder = RestClient.builder(hosts.toArray(new HttpHost[0])); + if (httpClientConfigCallback != null) { + builder.setHttpClientConfigCallback(httpClientConfigCallback); + } + return builder.build(); + } + + private Scope getClusterScope(Class<?> clazz) { + ClusterScope annotation = OpenSearchIntegTestCase.getAnnotation(clazz, ClusterScope.class); + // if we are not annotated assume suite! + return annotation == null ? Scope.SUITE : annotation.scope(); + } + + private TestCluster buildWithPrivateContext(final Scope scope, final long seed, OpenSearchIntegTestCase target) throws Exception { + return RandomizedContext.current().runWithPrivateRandomness(seed, () -> target.buildTestCluster(scope, seed)); + } + + private static boolean isSuiteScopedTest(Class<?> clazz) { + return clazz.getAnnotation(SuiteScopeTestCase.class) != null; + } + + private boolean hasParametersChanged(final ParameterizedOpenSearchIntegTestCase target) { + return !((ParameterizedOpenSearchIntegTestCase) suiteInstance).hasSameParametersAs(target); + } + + private boolean runTestScopeLifecycle() { + return suiteInstance == null; + } + + private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed, OpenSearchIntegTestCase target) throws Exception { + final Class<?> clazz = target.getClass(); + + synchronized (clusters) { + TestCluster testCluster = clusters.remove(clazz); // remove this cluster first + clearClusters(); // all leftovers are gone by now... 
this is really just a double safety if we miss something somewhere + switch (currentClusterScope) { + case SUITE: + if (testCluster == null) { // only build if it's not there yet + testCluster = buildWithPrivateContext(currentClusterScope, seed, target); + } + break; + case TEST: + // close the previous one and create a new one + IOUtils.closeWhileHandlingException(testCluster); + testCluster = target.buildTestCluster(currentClusterScope, seed); + break; + } + clusters.put(clazz, testCluster); + return testCluster; + } + } + + private void printTestMessage(String message) { + logger.info("[{}]: {} suite", getTestClass().getSimpleName(), message); + } + + private static Class<?> getTestClass() { + return OpenSearchTestCase.getTestClass(); + } + + private void printTestMessage(String message, FrameworkMethod method) { + logger.info("[{}#{}]: {} test", getTestClass().getSimpleName(), method.getName(), message); + } + + private void afterInternal(boolean afterClass, OpenSearchIntegTestCase target) throws Exception { + final Scope currentClusterScope = getClusterScope(getTestClass()); + internalCluster().ifPresent(InternalTestCluster::clearDisruptionScheme); + + OpenSearchIntegTestCase instance = suiteInstance; + if (instance == null) { + instance = target; + } + + try { + if (cluster() != null) { + if (currentClusterScope != Scope.TEST) { + Metadata metadata = clientForAnyNode().admin().cluster().prepareState().execute().actionGet().getState().getMetadata(); + + final Set<String> persistentKeys = new HashSet<>(metadata.persistentSettings().keySet()); + assertThat("test leaves persistent cluster metadata behind", persistentKeys, empty()); + + final Set<String> transientKeys = new HashSet<>(metadata.transientSettings().keySet()); + assertThat("test leaves transient cluster metadata behind", transientKeys, empty()); + } + instance.ensureClusterSizeConsistency(); + instance.ensureClusterStateConsistency(); + instance.ensureClusterStateCanBeReadByNodeTool(); + instance.beforeIndexDeletion(); + cluster().wipe(instance.excludeTemplates()); // wipe after to make sure we fail in the test that didn't ack the delete + if (afterClass || currentClusterScope == Scope.TEST) { + cluster().close(); + } + cluster().assertAfterTest(); + } + } finally { + if (currentClusterScope == Scope.TEST) { + clearClusters(); // it is ok to leave persistent / transient cluster state behind if scope is TEST + } + } + } + + private void clearClusters() throws Exception { + synchronized (clusters) { + if (!clusters.isEmpty()) { + IOUtils.close(clusters.values()); + clusters.clear(); + } + } + if (restClient != null) { + restClient.close(); + restClient = null; + } + OpenSearchTestCase.assertBusy(() -> { + int numChannels = RestCancellableNodeClient.getNumChannels(); + OpenSearchTestCase.assertEquals( + numChannels + + " channels still being tracked in " + + RestCancellableNodeClient.class.getSimpleName() + + " while there should be none", + 0, + numChannels + ); + }); + } + + private Statement statement(final Statement base, FrameworkMethod method, Object target) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + before(target, method); + + List<Throwable> errors = new ArrayList<Throwable>(); + try { + base.evaluate(); + } catch (Throwable t) { + errors.add(t); + } finally { + try { + after(target, method); + } catch (Throwable t) { + errors.add(t); + } + } + MultipleFailureException.assertEmpty(errors); + } + }; + } + + private void initializeSuiteScope(OpenSearchIntegTestCase target, 
FrameworkMethod method) throws Exception {
+        final Class<?> targetClass = getTestClass();
+        /*
+          Note we create the test class instance via reflection
+          since JUnit creates a new instance per test.
+        */
+        if (suiteInstance != null) {
+            // Catching the case when parameterized test cases are run: the test class stays the same but the test instance changes.
+            if (target instanceof ParameterizedOpenSearchIntegTestCase) {
+                assert suiteInstance instanceof ParameterizedOpenSearchIntegTestCase;
+                if (hasParametersChanged((ParameterizedOpenSearchIntegTestCase) target)) {
+                    printTestMessage("new instance of parameterized test class, recreating cluster scope", method);
+                    afterClass();
+                    beforeClass();
+                } else {
+                    return; /* same test class instance */
+                }
+            } else {
+                return; /* not a parameterized test */
+            }
+        }
+
+        assert suiteInstance == null;
+        if (isSuiteScopedTest(targetClass)) {
+            suiteInstance = target;
+
+            boolean success = false;
+            try {
+                printTestMessage("setup", method);
+                beforeInternal(target);
+                suiteInstance.setupSuiteScopeCluster();
+                success = true;
+            } finally {
+                if (!success) {
+                    afterClass();
+                }
+            }
+        } else {
+            suiteInstance = null;
+        }
+    }
+}
diff --git a/test/framework/src/main/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTestCase.java
new file mode 100644
index 0000000000000..b31dfa2bdefa5
--- /dev/null
+++ b/test/framework/src/main/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTestCase.java
@@ -0,0 +1,66 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.test;
+
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.settings.SettingsModule;
+import org.junit.After;
+import org.junit.Before;
+
+/**
+ * Base class for running the tests with parameterization using dynamic settings: the cluster will be created once before the test suite and the
+ * settings will be applied dynamically; note that not all settings can be changed dynamically (consider using {@link ParameterizedStaticSettingsOpenSearchIntegTestCase}
+ * instead).
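+ * <p>
+ * As a rough sketch (the {@code toUnset} builder name below is illustrative), applying and unapplying the parameterized settings
+ * around each test boils down to:
+ * <pre>{@code
+ * client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settings).get();  // before each test
+ * Settings.Builder toUnset = Settings.builder();
+ * settings.keySet().forEach(toUnset::putNull);
+ * client().admin().cluster().prepareUpdateSettings().setPersistentSettings(toUnset).get();   // after each test
+ * }</pre>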
+ * <p>
+ * Here is a simple illustration of the execution flow per parameter combination:
+ * <ul>
+ * <li><b>suite scope</b>: create cluster -> for each test method { apply settings -> run test method -> unapply settings } -> shutdown cluster</li>
+ * <li><b>test scope</b>: for each test method { create cluster -> apply settings -> run test method -> unapply settings -> shutdown cluster }</li>
+ * </ul>
+ */
+public abstract class ParameterizedDynamicSettingsOpenSearchIntegTestCase extends ParameterizedOpenSearchIntegTestCase {
+    public ParameterizedDynamicSettingsOpenSearchIntegTestCase(Settings dynamicSettings) {
+        super(dynamicSettings);
+    }
+
+    @Before
+    public void beforeTests() {
+        SettingsModule settingsModule = new SettingsModule(settings);
+        for (String key : settings.keySet()) {
+            assertTrue(
+                settingsModule.getClusterSettings().isDynamicSetting(key) || settingsModule.getIndexScopedSettings().isDynamicSetting(key)
+            );
+        }
+        client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settings).get();
+    }
+
+    @After
+    public void afterTests() {
+        final Settings.Builder settingsToUnset = Settings.builder();
+        settings.keySet().forEach(settingsToUnset::putNull);
+        client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settingsToUnset).get();
+    }
+
+    @Override
+    boolean hasSameParametersAs(ParameterizedOpenSearchIntegTestCase obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null) {
+            return false;
+        }
+
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+
+        return true;
+    }
+}
diff --git a/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java
index edda6bf5603f7..88a44f87952f5 100644
--- a/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/ParameterizedOpenSearchIntegTestCase.java
@@ -9,48 +9,43 @@
 package org.opensearch.test;
 
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.settings.SettingsModule;
-import org.junit.After;
-import org.junit.Before;
 
 import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING;
 
 /**
- * Base class for running the tests with parameterization of the dynamic settings
- * For any class that wants to use parameterization, use @ParametersFactory to generate
- * different params only for dynamic settings. Refer SearchCancellationIT for an example.
- * Note: this doesn't work for the parameterization of feature flag/static settings.
+ * Base class for running the tests with parameterization of the settings.
+ * For any class that wants to use parameterization, use {@link com.carrotsearch.randomizedtesting.annotations.ParametersFactory} to generate
+ * different parameters.
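+ * <p>
+ * For instance, a factory toggling the concurrent segment search setting (the pattern used by the parameterized suites in this change) could look like:
+ * <pre>{@code
+ * @ParametersFactory
+ * public static Collection<Object[]> parameters() {
+ *     return Arrays.asList(
+ *         new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() },
+ *         new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() }
+ *     );
+ * }
+ * }</pre>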
+ *
+ * There are two flavors of applying the parameterized settings to the cluster on the suite level:
+ * - static: the cluster will be pre-created with the settings at startup; please subclass {@link ParameterizedStaticSettingsOpenSearchIntegTestCase}. The method
+ * {@link #hasSameParametersAs(ParameterizedOpenSearchIntegTestCase)} is used by the test scaffolding to detect when the test suite is instantiated with
+ * new parameters and the test cluster has to be recreated.
+ * - dynamic: the cluster will be created once before the test suite and the settings will be applied dynamically; please subclass
+ * {@link ParameterizedDynamicSettingsOpenSearchIntegTestCase}. Note that not all settings can be changed dynamically.
+ *
+ * If the test suite uses the per-test scope, the cluster will be recreated for each test method (applying static or dynamic settings).
 */
-public abstract class ParameterizedOpenSearchIntegTestCase extends OpenSearchIntegTestCase {
-
-    private final Settings dynamicSettings;
+abstract class ParameterizedOpenSearchIntegTestCase extends OpenSearchIntegTestCase {
+    protected final Settings settings;
 
-    public ParameterizedOpenSearchIntegTestCase(Settings dynamicSettings) {
-        this.dynamicSettings = dynamicSettings;
-    }
-
-    @Before
-    public void beforeTests() {
-        SettingsModule settingsModule = new SettingsModule(dynamicSettings);
-        for (String key : dynamicSettings.keySet()) {
-            assertTrue(
-                settingsModule.getClusterSettings().isDynamicSetting(key) || settingsModule.getIndexScopedSettings().isDynamicSetting(key)
-            );
-        }
-        client().admin().cluster().prepareUpdateSettings().setPersistentSettings(dynamicSettings).get();
-    }
-
-    @After
-    public void afterTests() {
-        final Settings.Builder settingsToUnset = Settings.builder();
-        dynamicSettings.keySet().forEach(settingsToUnset::putNull);
-        client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settingsToUnset).get();
+    ParameterizedOpenSearchIntegTestCase(Settings settings) {
+        this.settings = settings;
     }
 
     // This method shouldn't be called in setupSuiteScopeCluster(). Only call this method inside single test.
     public void indexRandomForConcurrentSearch(String... indices) throws InterruptedException {
-        if (dynamicSettings.get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()).equals("true")) {
+        if (settings.get(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey()).equals("true")) {
             indexRandomForMultipleSlices(indices);
         }
     }
+
+    /**
+     * Compares the parameters of the two {@link ParameterizedOpenSearchIntegTestCase} test suite instances.
+     * This method is used by {@link OpenSearchTestClusterRule} to determine when the parameterized test suite is instantiated with
+     * another set of parameters and the test cluster has to be recreated to reflect that.
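+     * <p>
+     * For example (illustrative values), two static-settings suite instances constructed with
+     * {@code Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build()} and the
+     * corresponding {@code false} variant do not have the same parameters, so the test cluster is shut down and recreated between them.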
+     * @param obj instance of the {@link ParameterizedOpenSearchIntegTestCase} to compare with
+     * @return {@code true} if the parameters of the test suites are the same, {@code false} otherwise
+     */
+    abstract boolean hasSameParametersAs(ParameterizedOpenSearchIntegTestCase obj);
 }
diff --git a/test/framework/src/main/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTestCase.java
new file mode 100644
index 0000000000000..2e97228326314
--- /dev/null
+++ b/test/framework/src/main/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTestCase.java
@@ -0,0 +1,53 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.test;
+
+import org.opensearch.common.settings.Settings;
+
+import java.util.Objects;
+
+/**
+ * Base class for running the tests with parameterization using static settings: the cluster will be pre-created with the settings at startup, and the method
+ * {@link #hasSameParametersAs(ParameterizedOpenSearchIntegTestCase)} is used by the test scaffolding to detect when the test suite is instantiated with
+ * new parameters and the test cluster has to be recreated.
+ * <p>
+ * Here is a simple illustration of the execution flow per parameter combination:
+ * <ul>
+ * <li><b>suite scope</b>: create cluster -> for each test method { run test method } -> shutdown cluster</li>
+ * <li><b>test scope</b>: for each test method { create cluster -> run test method -> shutdown cluster }</li>
+ * </ul>
+ */
+public abstract class ParameterizedStaticSettingsOpenSearchIntegTestCase extends ParameterizedOpenSearchIntegTestCase {
+    public ParameterizedStaticSettingsOpenSearchIntegTestCase(Settings nodeSettings) {
+        super(nodeSettings);
+    }
+
+    @Override
+    protected Settings nodeSettings(int nodeOrdinal) {
+        return Settings.builder().put(super.nodeSettings(nodeOrdinal)).put(settings).build();
+    }
+
+    @Override
+    boolean hasSameParametersAs(ParameterizedOpenSearchIntegTestCase obj) {
+        if (this == obj) {
+            return true;
+        }
+
+        if (obj == null) {
+            return false;
+        }
+
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+
+        final ParameterizedStaticSettingsOpenSearchIntegTestCase other = (ParameterizedStaticSettingsOpenSearchIntegTestCase) obj;
+        return Objects.equals(settings, other.settings);
+    }
+}

From 6ccb46ba04b5d295af4c4da884068f1a5cf6cfba Mon Sep 17 00:00:00 2001
From: "Daniel (dB.) Doubrovkine" <dblock@amazon.com>
Date: Thu, 18 Jan 2024 17:11:49 -0500
Subject: [PATCH 106/178] Expose RestController all handlers iterator. (#11876)

* Expose RestController all handlers iterator.

Signed-off-by: dblock <dblock@amazon.com>

* Extract MethodHandlers interface.

Signed-off-by: dblock <dblock@amazon.com>

* Added CHANGELOG.
Signed-off-by: dblock <dblock@amazon.com> --------- Signed-off-by: dblock <dblock@amazon.com> --- CHANGELOG.md | 1 + .../org/opensearch/common/path/PathTrie.java | 42 +++++++++ .../org/opensearch/rest/MethodHandlers.java | 74 ++------------- .../org/opensearch/rest/RestController.java | 23 +++-- .../opensearch/rest/RestMethodHandlers.java | 92 +++++++++++++++++++ .../opensearch/common/path/PathTrieTests.java | 31 +++++++ .../opensearch/rest/RestControllerTests.java | 37 +++++++- 7 files changed, 225 insertions(+), 75 deletions(-) create mode 100644 server/src/main/java/org/opensearch/rest/RestMethodHandlers.java diff --git a/CHANGELOG.md b/CHANGELOG.md index be2a639892aa9..822e4196adf5e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -132,6 +132,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) - Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) - Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563)) +- Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([11876](https://github.com/opensearch-project/OpenSearch/pull/11876)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/server/src/main/java/org/opensearch/common/path/PathTrie.java b/server/src/main/java/org/opensearch/common/path/PathTrie.java index 7cb7b46acfafe..0b516fa037c48 100644 --- a/server/src/main/java/org/opensearch/common/path/PathTrie.java +++ b/server/src/main/java/org/opensearch/common/path/PathTrie.java @@ -37,6 +37,7 @@ import java.util.Iterator; import java.util.Map; import java.util.NoSuchElementException; +import java.util.Stack; import java.util.function.BiFunction; import java.util.function.Supplier; @@ -405,4 +406,45 @@ public T next() { } }; } + + public Iterator<T> retrieveAll() { + Stack<TrieNode> stack = new Stack<>(); + stack.add(root); + + return new Iterator<T>() { + @Override + public boolean hasNext() { + while (!stack.empty()) { + TrieNode node = stack.peek(); + + if (node.value != null) { + return true; + } + + advance(); + } + + return false; + } + + @Override + public T next() { + while (!stack.empty()) { + TrieNode node = advance(); + + if (node.value != null) { + return node.value; + } + } + + throw new NoSuchElementException("called next() without validating hasNext()! no more nodes available"); + } + + private TrieNode advance() { + TrieNode node = stack.pop(); + stack.addAll(node.children.values()); + return node; + } + }; + } } diff --git a/server/src/main/java/org/opensearch/rest/MethodHandlers.java b/server/src/main/java/org/opensearch/rest/MethodHandlers.java index 8c29bf2e66036..30221705e1aba 100644 --- a/server/src/main/java/org/opensearch/rest/MethodHandlers.java +++ b/server/src/main/java/org/opensearch/rest/MethodHandlers.java @@ -6,82 +6,24 @@ * compatible open source license. */ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - package org.opensearch.rest; -import org.opensearch.common.Nullable; +import org.opensearch.common.annotation.PublicApi; -import java.util.HashMap; -import java.util.Map; import java.util.Set; /** - * Encapsulate multiple handlers for the same path, allowing different handlers for different HTTP verbs. - * - * @opensearch.api + * A collection of REST method handlers. */ -final class MethodHandlers { - - private final String path; - private final Map<RestRequest.Method, RestHandler> methodHandlers; - - MethodHandlers(String path, RestHandler handler, RestRequest.Method... methods) { - this.path = path; - this.methodHandlers = new HashMap<>(methods.length); - for (RestRequest.Method method : methods) { - methodHandlers.put(method, handler); - } - } - - /** - * Add a handler for an additional array of methods. Note that {@code MethodHandlers} - * does not allow replacing the handler for an already existing method. - */ - MethodHandlers addMethods(RestHandler handler, RestRequest.Method... methods) { - for (RestRequest.Method method : methods) { - RestHandler existing = methodHandlers.putIfAbsent(method, handler); - if (existing != null) { - throw new IllegalArgumentException("Cannot replace existing handler for [" + path + "] for method: " + method); - } - } - return this; - } - +@PublicApi(since = "2.12.0") +public interface MethodHandlers { /** - * Returns the handler for the given method or {@code null} if none exists. + * Return a set of all valid HTTP methods for the particular path. */ - @Nullable - RestHandler getHandler(RestRequest.Method method) { - return methodHandlers.get(method); - } + Set<RestRequest.Method> getValidMethods(); /** - * Return a set of all valid HTTP methods for the particular path + * Returns the relative HTTP path of the set of method handlers. 
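+ * For example, the built-in favicon handler set has path {@code "/favicon.ico"} and allows only {@code GET} (see the RestController tests below).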
*/ - Set<RestRequest.Method> getValidMethods() { - return methodHandlers.keySet(); - } + String getPath(); } diff --git a/server/src/main/java/org/opensearch/rest/RestController.java b/server/src/main/java/org/opensearch/rest/RestController.java index cc48b59699a17..95abb9b3daeca 100644 --- a/server/src/main/java/org/opensearch/rest/RestController.java +++ b/server/src/main/java/org/opensearch/rest/RestController.java @@ -65,6 +65,7 @@ import java.io.IOException; import java.io.InputStream; import java.net.URI; +import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; @@ -107,7 +108,7 @@ public class RestController implements HttpServerTransport.Dispatcher { } } - private final PathTrie<MethodHandlers> handlers = new PathTrie<>(RestUtils.REST_DECODER); + private final PathTrie<RestMethodHandlers> handlers = new PathTrie<>(RestUtils.REST_DECODER); private final UnaryOperator<RestHandler> handlerWrapper; @@ -144,6 +145,16 @@ public RestController( ); } + /** + * Returns an iterator over registered REST method handlers. + * @return {@link Iterator} of {@link MethodHandlers} + */ + public Iterator<MethodHandlers> getAllHandlers() { + List<MethodHandlers> methodHandlers = new ArrayList<>(); + handlers.retrieveAll().forEachRemaining(methodHandlers::add); + return methodHandlers.iterator(); + } + /** * Registers a REST handler to be executed when the provided {@code method} and {@code path} match the request. * @@ -221,7 +232,7 @@ protected void registerHandler(RestRequest.Method method, String path, RestHandl private void registerHandlerNoWrap(RestRequest.Method method, String path, RestHandler maybeWrappedHandler) { handlers.insertOrUpdate( path, - new MethodHandlers(path, maybeWrappedHandler, method), + new RestMethodHandlers(path, maybeWrappedHandler, method), (mHandlers, newMHandler) -> mHandlers.addMethods(maybeWrappedHandler, method) ); } @@ -392,10 +403,10 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel // Resolves the HTTP method and fails if the method is invalid requestMethod = request.method(); // Loop through all possible handlers, attempting to dispatch the request - Iterator<MethodHandlers> allHandlers = getAllHandlers(request.params(), rawPath); + Iterator<RestMethodHandlers> allHandlers = getAllRestMethodHandlers(request.params(), rawPath); while (allHandlers.hasNext()) { final RestHandler handler; - final MethodHandlers handlers = allHandlers.next(); + final RestMethodHandlers handlers = allHandlers.next(); if (handlers == null) { handler = null; } else { @@ -423,7 +434,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel handleBadRequest(uri, requestMethod, channel); } - Iterator<MethodHandlers> getAllHandlers(@Nullable Map<String, String> requestParamsRef, String rawPath) { + Iterator<RestMethodHandlers> getAllRestMethodHandlers(@Nullable Map<String, String> requestParamsRef, String rawPath) { final Supplier<Map<String, String>> paramsSupplier; if (requestParamsRef == null) { paramsSupplier = () -> null; @@ -561,7 +572,7 @@ private boolean handleAuthenticateUser(final RestRequest request, final RestChan */ private Set<RestRequest.Method> getValidHandlerMethodSet(String rawPath) { Set<RestRequest.Method> validMethods = new HashSet<>(); - Iterator<MethodHandlers> allHandlers = getAllHandlers(null, rawPath); + Iterator<RestMethodHandlers> allHandlers = getAllRestMethodHandlers(null, rawPath); while (allHandlers.hasNext()) { final MethodHandlers methodHandlers = 
allHandlers.next(); if (methodHandlers != null) { diff --git a/server/src/main/java/org/opensearch/rest/RestMethodHandlers.java b/server/src/main/java/org/opensearch/rest/RestMethodHandlers.java new file mode 100644 index 0000000000000..a430d8ace447c --- /dev/null +++ b/server/src/main/java/org/opensearch/rest/RestMethodHandlers.java @@ -0,0 +1,92 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.rest; + +import org.opensearch.common.Nullable; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; + +/** + * Encapsulate multiple handlers for the same path, allowing different handlers for different HTTP verbs. + */ +final class RestMethodHandlers implements MethodHandlers { + + private final String path; + private final Map<RestRequest.Method, RestHandler> methodHandlers; + + RestMethodHandlers(String path, RestHandler handler, RestRequest.Method... methods) { + this.path = path; + this.methodHandlers = new HashMap<>(methods.length); + for (RestRequest.Method method : methods) { + methodHandlers.put(method, handler); + } + } + + /** + * Add a handler for an additional array of methods. Note that {@code MethodHandlers} + * does not allow replacing the handler for an already existing method. + */ + public RestMethodHandlers addMethods(RestHandler handler, RestRequest.Method... methods) { + for (RestRequest.Method method : methods) { + RestHandler existing = methodHandlers.putIfAbsent(method, handler); + if (existing != null) { + throw new IllegalArgumentException("Cannot replace existing handler for [" + path + "] for method: " + method); + } + } + return this; + } + + /** + * Returns the handler for the given method or {@code null} if none exists. + */ + @Nullable + public RestHandler getHandler(RestRequest.Method method) { + return methodHandlers.get(method); + } + + /** + * Return a set of all valid HTTP methods for the particular path. + */ + public Set<RestRequest.Method> getValidMethods() { + return methodHandlers.keySet(); + } + + /** + * Returns the relative HTTP path of the set of method handlers. 
+     */
+    public String getPath() {
+        return path;
+    }
+}
diff --git a/server/src/test/java/org/opensearch/common/path/PathTrieTests.java b/server/src/test/java/org/opensearch/common/path/PathTrieTests.java
index e366972feeaf2..2f0618ee299b4 100644
--- a/server/src/test/java/org/opensearch/common/path/PathTrieTests.java
+++ b/server/src/test/java/org/opensearch/common/path/PathTrieTests.java
@@ -36,8 +36,10 @@ import org.opensearch.rest.RestUtils;
 import org.opensearch.test.OpenSearchTestCase;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 
 import static org.hamcrest.Matchers.equalTo;
@@ -286,4 +288,33 @@ public void testEscapedSlashWithinUrl() {
         assertThat(params.get("type"), equalTo("type"));
         assertThat(params.get("id"), equalTo("id"));
     }
+
+    public void testRetrieveAllEmpty() {
+        PathTrie<String> trie = new PathTrie<>(NO_DECODER);
+        Iterator<String> allPaths = trie.retrieveAll();
+        assertFalse(allPaths.hasNext());
+    }
+
+    public void testRetrieveAll() {
+        PathTrie<String> trie = new PathTrie<>(NO_DECODER);
+        trie.insert("{testA}", "test1");
+        trie.insert("{testA}/{testB}", "test2");
+        trie.insert("a/{testB}", "test3");
+        trie.insert("{testA}/b", "test4");
+        trie.insert("{testA}/b/c", "test5");
+
+        Iterator<String> iterator = trie.retrieveAll();
+        assertTrue(iterator.hasNext());
+        List<String> paths = new ArrayList<>();
+        iterator.forEachRemaining(paths::add);
+        assertEquals(paths, List.of("test1", "test4", "test5", "test2", "test3"));
+        assertFalse(iterator.hasNext());
+    }
+
+    public void testRetrieveAllWithNullValue() {
+        PathTrie<String> trie = new PathTrie<>(NO_DECODER);
+        trie.insert("{testA}", null);
+        Iterator<String> iterator = trie.retrieveAll();
+        assertFalse(iterator.hasNext());
+    }
 }
diff --git a/server/src/test/java/org/opensearch/rest/RestControllerTests.java b/server/src/test/java/org/opensearch/rest/RestControllerTests.java
index 25405afa24c16..b7239e7b59742 100644
--- a/server/src/test/java/org/opensearch/rest/RestControllerTests.java
+++ b/server/src/test/java/org/opensearch/rest/RestControllerTests.java
@@ -138,6 +138,37 @@ public void teardown() throws IOException {
         IOUtils.close(client);
     }
 
+    public void testDefaultRestControllerGetAllHandlersContainsFavicon() {
+        final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService, identityService);
+        Iterator<MethodHandlers> handlers = restController.getAllHandlers();
+        assertTrue(handlers.hasNext());
+        MethodHandlers faviconHandler = handlers.next();
+        assertEquals(faviconHandler.getPath(), "/favicon.ico");
+        assertEquals(faviconHandler.getValidMethods(), Set.of(RestRequest.Method.GET));
+        assertFalse(handlers.hasNext());
+    }
+
+    public void testRestControllerGetAllHandlers() {
+        final RestController restController = new RestController(null, null, null, circuitBreakerService, usageService, identityService);
+
+        restController.registerHandler(RestRequest.Method.PATCH, "/foo", mock(RestHandler.class));
+        restController.registerHandler(RestRequest.Method.GET, "/foo", mock(RestHandler.class));
+
+        Iterator<MethodHandlers> handlers = restController.getAllHandlers();
+
+        assertTrue(handlers.hasNext());
+        MethodHandlers rootHandler = handlers.next();
+        assertEquals(rootHandler.getPath(), "/foo");
+        assertEquals(rootHandler.getValidMethods(), Set.of(RestRequest.Method.GET, RestRequest.Method.PATCH));
+
+        assertTrue(handlers.hasNext());
+        MethodHandlers faviconHandler = handlers.next();
+
assertEquals(faviconHandler.getPath(), "/favicon.ico"); + assertEquals(faviconHandler.getValidMethods(), Set.of(RestRequest.Method.GET)); + + assertFalse(handlers.hasNext()); + } + public void testApplyRelevantHeaders() throws Exception { final ThreadContext threadContext = client.threadPool().getThreadContext(); Set<RestHeaderDefinition> headers = new HashSet<>( @@ -150,15 +181,15 @@ public void testApplyRelevantHeaders() throws Exception { restHeaders.put("header.3", Collections.singletonList("false")); RestRequest fakeRequest = new FakeRestRequest.Builder(xContentRegistry()).withHeaders(restHeaders).build(); final RestController spyRestController = spy(restController); - when(spyRestController.getAllHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<MethodHandlers>() { + when(spyRestController.getAllRestMethodHandlers(null, fakeRequest.rawPath())).thenReturn(new Iterator<RestMethodHandlers>() { @Override public boolean hasNext() { return false; } @Override - public MethodHandlers next() { - return new MethodHandlers("/", (RestRequest request, RestChannel channel, NodeClient client) -> { + public RestMethodHandlers next() { + return new RestMethodHandlers("/", (RestRequest request, RestChannel channel, NodeClient client) -> { assertEquals("true", threadContext.getHeader("header.1")); assertEquals("true", threadContext.getHeader("header.2")); assertNull(threadContext.getHeader("header.3")); From 774e7d41b5ab146f3316eab667b54efe9b16aa33 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Thu, 18 Jan 2024 18:38:48 -0500 Subject: [PATCH 107/178] Only check LongVector.SPECIES_PREFERRED when jdk.incubator.vector is enabled (#11939) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- .../common/round/RoundableFactory.java | 26 ++++++++++++++++--- 1 file changed, 22 insertions(+), 4 deletions(-) diff --git a/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java index c5ec45bceb5bd..0709ed4374227 100644 --- a/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java +++ b/libs/common/src/main/java20/org/opensearch/common/round/RoundableFactory.java @@ -10,8 +10,6 @@ import org.opensearch.common.annotation.InternalApi; -import jdk.incubator.vector.LongVector; - /** * Factory class to create and return the fastest implementation of {@link Roundable}. 
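+ * The top-level {@code jdk.incubator.vector} import is removed so this class can be loaded even when the
+ * incubator module is absent; the {@code VectorCheck} holder below only touches {@code LongVector} once a
+ * {@code Class.forName} probe has confirmed the module is present.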
 *
@@ -34,10 +32,30 @@ public final class RoundableFactory {
      */
     private static final boolean USE_BTREE_SEARCHER;
 
+    /**
+     * This class is initialized only when:
+     * - JDK-20+
+     * - jdk.incubator.vector.LongVector is available (--add-modules=jdk.incubator.vector is passed)
+     */
+    private static final class VectorCheck {
+        final static int SPECIES_PREFERRED = jdk.incubator.vector.LongVector.SPECIES_PREFERRED.length();
+    }
+
     static {
         String simdRoundingFeatureFlag = System.getProperty("opensearch.experimental.feature.simd.rounding.enabled");
-        USE_BTREE_SEARCHER = "forced".equalsIgnoreCase(simdRoundingFeatureFlag)
-            || (LongVector.SPECIES_PREFERRED.length() >= 4 && "true".equalsIgnoreCase(simdRoundingFeatureFlag));
+        boolean useBtreeSearcher = false;
+
+        try {
+            final Class<?> incubator = Class.forName("jdk.incubator.vector.LongVector");
+
+            useBtreeSearcher = "forced".equalsIgnoreCase(simdRoundingFeatureFlag)
+                || (VectorCheck.SPECIES_PREFERRED >= 4 && "true".equalsIgnoreCase(simdRoundingFeatureFlag));
+
+        } catch (final ClassNotFoundException ex) {
+            /* do not use BtreeSearcher */
+        }
+
+        USE_BTREE_SEARCHER = useBtreeSearcher;
     }
 
     private RoundableFactory() {}
From e265355e8cf8399443b84104032af075953f1245 Mon Sep 17 00:00:00 2001
From: Varun Bansal <bansvaru@amazon.com>
Date: Fri, 19 Jan 2024 16:57:33 +0530
Subject: [PATCH 108/178] Allow empty translog for ITs testing no ingestion post refresh/commit (#11946)

Signed-off-by: bansvaru <bansvaru@amazon.com>
---
 .../RemoteStoreBaseIntegTestCase.java         |  9 +++++++
 .../remotestore/RemoteStoreRestoreIT.java     | 27 ++++++++++---------
 2 files changed, 23 insertions(+), 13 deletions(-)

diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java
index d23e634bb3368..751de66a97806 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java
@@ -84,6 +84,10 @@ public class RemoteStoreBaseIntegTestCase extends OpenSearchIntegTestCase {
     );
 
     protected Map<String, Long> indexData(int numberOfIterations, boolean invokeFlush, String index) {
+        return indexData(numberOfIterations, invokeFlush, false, index);
+    }
+
+    protected Map<String, Long> indexData(int numberOfIterations, boolean invokeFlush, boolean emptyTranslog, String index) {
         long totalOperations = 0;
         long refreshedOrFlushedOperations = 0;
         long maxSeqNo = -1;
@@ -96,6 +100,11 @@ protected Map<String, Long> indexData(int numberOfIterations, boolean invokeFlus
             } else {
                 refresh(index);
             }
+
+            // skip indexing if last iteration as we don't want to have any data in remote translog
+            if (emptyTranslog && i == numberOfIterations - 1) {
+                continue;
+            }
             maxSeqNoRefreshedOrFlushed = maxSeqNo;
             indexingStats.put(MAX_SEQ_NO_REFRESHED_OR_FLUSHED + "-shard-" + shardId, maxSeqNoRefreshedOrFlushed);
             refreshedOrFlushedOperations = totalOperations;
diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
index 7626e3dba6424..72296a1c01a24 100644
--- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java
@@ -44,7 +44,7 @@ public class RemoteStoreRestoreIT extends
BaseRemoteStoreRestoreIT { * @throws IOException IO Exception. */ public void testRemoteTranslogRestoreWithNoDataPostCommit() throws Exception { - testRestoreFlow(1, true, randomIntBetween(1, 5)); + testRestoreFlow(1, true, true, randomIntBetween(1, 5)); } /** @@ -52,7 +52,7 @@ public void testRemoteTranslogRestoreWithNoDataPostCommit() throws Exception { * @throws IOException IO Exception. */ public void testRemoteTranslogRestoreWithNoDataPostRefresh() throws Exception { - testRestoreFlow(1, false, randomIntBetween(1, 5)); + testRestoreFlow(1, false, true, randomIntBetween(1, 5)); } /** @@ -61,7 +61,7 @@ public void testRemoteTranslogRestoreWithNoDataPostRefresh() throws Exception { * @throws IOException IO Exception. */ public void testRemoteTranslogRestoreWithRefreshedData() throws Exception { - testRestoreFlow(randomIntBetween(2, 5), false, randomIntBetween(1, 5)); + testRestoreFlow(randomIntBetween(2, 5), false, false, randomIntBetween(1, 5)); } /** @@ -70,7 +70,7 @@ public void testRemoteTranslogRestoreWithRefreshedData() throws Exception { * @throws IOException IO Exception. */ public void testRemoteTranslogRestoreWithCommittedData() throws Exception { - testRestoreFlow(randomIntBetween(2, 5), true, randomIntBetween(1, 5)); + testRestoreFlow(randomIntBetween(2, 5), true, false, randomIntBetween(1, 5)); } /** @@ -78,7 +78,7 @@ public void testRemoteTranslogRestoreWithCommittedData() throws Exception { * @throws IOException IO Exception. */ public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(1, true, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(1, true, true, randomIntBetween(1, 5)); } /** @@ -86,7 +86,7 @@ public void testRTSRestoreWithNoDataPostCommitPrimaryReplicaDown() throws Except * @throws IOException IO Exception. */ public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(1, false, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(1, false, true, randomIntBetween(1, 5)); } /** @@ -95,7 +95,7 @@ public void testRTSRestoreWithNoDataPostRefreshPrimaryReplicaDown() throws Excep * @throws IOException IO Exception. */ public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), false, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), false, false, randomIntBetween(1, 5)); } /** @@ -104,7 +104,7 @@ public void testRTSRestoreWithRefreshedDataPrimaryReplicaDown() throws Exception * @throws IOException IO Exception. */ public void testRTSRestoreWithCommittedDataPrimaryReplicaDown() throws Exception { - testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), true, randomIntBetween(1, 5)); + testRestoreFlowBothPrimaryReplicasDown(randomIntBetween(2, 5), true, false, randomIntBetween(1, 5)); } private void restoreAndVerify(int shardCount, int replicaCount, Map<String, Long> indexStats) throws Exception { @@ -122,9 +122,9 @@ private void restoreAndVerify(int shardCount, int replicaCount, Map<String, Long * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked. * @throws IOException IO Exception. 
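+     * @param emptyTranslog If true, the final iteration performs only the flush/refresh and skips the
+     *                      subsequent indexing, leaving the remote translog empty.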
*/ - private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, int shardCount) throws Exception { + private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, boolean emptyTranslog, int shardCount) throws Exception { prepareCluster(1, 3, INDEX_NAME, 0, shardCount); - Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME); + Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, emptyTranslog, INDEX_NAME); assertEquals(shardCount, getNumShards(INDEX_NAME).totalNumShards); assertHitCount(client().prepareSearch(INDEX_NAME).setSize(0).get(), indexStats.get(REFRESHED_OR_FLUSHED_OPERATIONS)); @@ -141,9 +141,10 @@ private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, int sh * @param invokeFlush If true, a flush is invoked. Otherwise, a refresh is invoked. * @throws IOException IO Exception. */ - private void testRestoreFlowBothPrimaryReplicasDown(int numberOfIterations, boolean invokeFlush, int shardCount) throws Exception { + private void testRestoreFlowBothPrimaryReplicasDown(int numberOfIterations, boolean invokeFlush, boolean emptyTranslog, int shardCount) + throws Exception { prepareCluster(1, 2, INDEX_NAME, 1, shardCount); - Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, INDEX_NAME); + Map<String, Long> indexStats = indexData(numberOfIterations, invokeFlush, emptyTranslog, INDEX_NAME); assertEquals(shardCount * 2, getNumShards(INDEX_NAME).totalNumShards); internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNodeName(INDEX_NAME))); @@ -391,7 +392,7 @@ public void testRTSRestoreWithCommittedDataExcludeIndicesPatterns() throws Excep * @throws IOException IO Exception. */ public void testRTSRestoreDataOnlyInTranslog() throws Exception { - testRestoreFlow(0, true, randomIntBetween(1, 5)); + testRestoreFlow(0, true, false, randomIntBetween(1, 5)); } public void testRateLimitedRemoteDownloads() throws Exception { From 84f303b6069893cf9350556a4c316153ba526901 Mon Sep 17 00:00:00 2001 From: Neetika Singhal <neetiks@amazon.com> Date: Fri, 19 Jan 2024 14:10:31 -0800 Subject: [PATCH 109/178] Fix memory leak issue in ReorganizingLongHash (#11953) Signed-off-by: Neetika Singhal <neetiks@amazon.com> --- CHANGELOG.md | 1 + .../common/util/ReorganizingLongHash.java | 15 +++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 822e4196adf5e..d0e2a115859c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -236,6 +236,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix issue when calling Delete PIT endpoint and no PITs exist ([#11711](https://github.com/opensearch-project/OpenSearch/pull/11711)) - Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490)) - Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815)) +- Fix memory leak issue in ReorganizingLongHash ([#11953](https://github.com/opensearch-project/OpenSearch/issues/11953)) ### Security diff --git a/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java b/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java index 86e7227cb6c85..fe053a26329e4 100644 --- a/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java +++ b/server/src/main/java/org/opensearch/common/util/ReorganizingLongHash.java @@ -118,10 +118,17 
@@ public ReorganizingLongHash(final long initialCapacity, final float loadFactor, mask = capacity - 1; grow = (long) (capacity * loadFactor); size = 0; - - table = bigArrays.newLongArray(capacity, false); - table.fill(0, capacity, -1); // -1 represents an empty slot - keys = bigArrays.newLongArray(initialCapacity, false); + try { + table = bigArrays.newLongArray(capacity, false); + table.fill(0, capacity, -1); // -1 represents an empty slot + keys = bigArrays.newLongArray(initialCapacity, false); + } finally { + if (table == null || keys == null) { + // it's important to close the arrays initialized above to prevent memory leak + // refer: https://github.com/opensearch-project/OpenSearch/issues/10154 + Releasables.closeWhileHandlingException(table, keys); + } + } } /** From bea196f4af5a717a91385eab6db482611024910d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 09:09:09 -0500 Subject: [PATCH 110/178] Bump com.google.oauth-client:google-oauth-client from 1.34.1 to 1.35.0 in /plugins/discovery-gce (#11960) * Bump com.google.oauth-client:google-oauth-client Bumps [com.google.oauth-client:google-oauth-client](https://github.com/googleapis/google-oauth-java-client) from 1.34.1 to 1.35.0. - [Release notes](https://github.com/googleapis/google-oauth-java-client/releases) - [Changelog](https://github.com/googleapis/google-oauth-java-client/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/google-oauth-java-client/compare/v1.34.1...v1.35.0) --- updated-dependencies: - dependency-name: com.google.oauth-client:google-oauth-client dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + plugins/discovery-gce/build.gradle | 2 +- .../discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 | 1 - .../discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 plugins/discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index d0e2a115859c3..3148681c18f2f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -176,6 +176,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) - Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.1.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886)) - Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794)) +- Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 
c8b52d3afcd45..85efcc43fd65a 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -24,7 +24,7 @@ versions << [ dependencies { api "com.google.apis:google-api-services-compute:v1-rev160-${versions.google}" api "com.google.api-client:google-api-client:${versions.google}" - api "com.google.oauth-client:google-oauth-client:1.34.1" + api "com.google.oauth-client:google-oauth-client:1.35.0" api "com.google.http-client:google-http-client:${versions.google}" api "com.google.http-client:google-http-client-jackson2:${versions.google}" api 'com.google.code.findbugs:jsr305:3.0.2' diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 deleted file mode 100644 index a8434bd380761..0000000000000 --- a/plugins/discovery-gce/licenses/google-oauth-client-1.34.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4a4f88c5e13143f882268c98239fb85c3b2c6cb2 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 new file mode 100644 index 0000000000000..a52e79088c7ca --- /dev/null +++ b/plugins/discovery-gce/licenses/google-oauth-client-1.35.0.jar.sha1 @@ -0,0 +1 @@ +2f52003156e40ba8be5f349a2716a77428896e69 \ No newline at end of file From 761e13b621bd7463309979528d4d3768266bd7c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 12:39:43 -0500 Subject: [PATCH 111/178] Bump com.diffplug.spotless from 6.23.2 to 6.24.0 (#11962) * Bump com.diffplug.spotless from 6.23.2 to 6.24.0 Bumps com.diffplug.spotless from 6.23.2 to 6.24.0. --- updated-dependencies: - dependency-name: com.diffplug.spotless dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3148681c18f2f..98e3c5b5dd9ec 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -177,6 +177,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.1.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886)) - Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794)) - Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960)) +- Bump `com.diffplug.spotless` from 6.23.2 to 6.24.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/build.gradle b/build.gradle index 296c30391af09..4d29ca8664af3 100644 --- a/build.gradle +++ b/build.gradle @@ -54,7 +54,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.23.2" apply false + id "com.diffplug.spotless" version "6.24.0" apply false id "org.gradle.test-retry" version "1.5.4" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' From 304042f5cd55710151b3841df8248b084e0abe28 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 14:07:34 -0500 Subject: [PATCH 112/178] Bump com.networknt:json-schema-validator from 1.1.0 to 1.2.0 (#11963) * Bump com.networknt:json-schema-validator from 1.1.0 to 1.2.0 Bumps [com.networknt:json-schema-validator](https://github.com/networknt/json-schema-validator) from 1.1.0 to 1.2.0. - [Release notes](https://github.com/networknt/json-schema-validator/releases) - [Changelog](https://github.com/networknt/json-schema-validator/blob/master/CHANGELOG.md) - [Commits](https://github.com/networknt/json-schema-validator/compare/1.1.0...1.2.0) --- updated-dependencies: - dependency-name: com.networknt:json-schema-validator dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- buildSrc/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 98e3c5b5dd9ec..695e98cebbb38 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -174,7 +174,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798)) - Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887)) - Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)) -- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.1.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886)) +- Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.2.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886), [#11963](https://github.com/opensearch-project/OpenSearch/pull/11963)) - Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794)) - Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960)) - Bump `com.diffplug.spotless` from 6.23.2 to 6.24.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962)) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 1cb21acd14af7..0562ecc6ee61b 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -118,7 +118,7 @@ dependencies { api 'com.avast.gradle:gradle-docker-compose-plugin:0.17.6' api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}" api 'org.apache.maven:maven-model:3.9.6' - api 'com.networknt:json-schema-validator:1.1.0' + api 'com.networknt:json-schema-validator:1.2.0' api 'org.jruby.jcodings:jcodings:1.0.58' api 'org.jruby.joni:joni:2.2.1' api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}" From 17f20c6bfef3a8a88ce02749f23974044c32f55c Mon Sep 17 00:00:00 2001 From: Kunal Kotwani <kkotwani@amazon.com> Date: Mon, 22 Jan 2024 11:23:03 -0800 Subject: [PATCH 113/178] [Github Action] Enhance version increment script to create tracking issue (#11926) Signed-off-by: Kunal Kotwani <kkotwani@amazon.com> --- .github/workflows/version.yml | 109 +++++++++++++++++++++------------- 1 file changed, 69 insertions(+), 40 deletions(-) diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml index a20c671c137b2..f4adef1ff06b0 100644 --- a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -1,28 +1,32 @@ name: Increment Version on: + workflow_dispatch: + inputs: + tag: + description: 'the tag' + required: true + type: string push: tags: - '*.*.*' -permissions: {} +permissions: + contents: write + issues: write + pull-requests: write + jobs: build: if: github.repository == 'opensearch-project/OpenSearch' runs-on: ubuntu-latest steps: - - name: GitHub App token - id: github_app_token - uses: tibdex/github-app-token@v2.1.0 - with: - app_id: ${{ 
secrets.APP_ID }}
-          private_key: ${{ secrets.APP_PRIVATE_KEY }}
-          installation_id: 22958780
-
-      - uses: actions/checkout@v4
-      - name: Fetch Tag and Version Information
+      - name: Fetch tag and version information
         run: |
           TAG=$(echo "${GITHUB_REF#refs/*/}")
+          if [ -n "${{ github.event.inputs.tag }}" ]; then
+            TAG=${{ github.event.inputs.tag }}
+          fi
           CURRENT_VERSION_ARRAY=($(echo "$TAG" | tr . '\n'))
           BASE=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:2}")
           BASE_X=$(IFS=. ; echo "${CURRENT_VERSION_ARRAY[*]:0:1}.x")
@@ -44,24 +48,22 @@ jobs:
           echo "NEXT_VERSION=$NEXT_VERSION" >> $GITHUB_ENV
           echo "NEXT_VERSION_UNDERSCORE=$NEXT_VERSION_UNDERSCORE" >> $GITHUB_ENV
           echo "NEXT_VERSION_ID=$NEXT_VERSION_ID" >> $GITHUB_ENV
+
       - uses: actions/checkout@v4
         with:
           ref: ${{ env.BASE }}
-          token: ${{ steps.github_app_token.outputs.token }}

-      - name: Increment Patch Version
-        run: |
-          echo Incrementing $CURRENT_VERSION to $NEXT_VERSION
-          echo " - \"$CURRENT_VERSION\"" >> .ci/bwcVersions
-          sed -i "s/opensearch = $CURRENT_VERSION/opensearch = $NEXT_VERSION/g" buildSrc/version.properties
-          echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
-          sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
-          sed -i "s/CURRENT = $CURRENT_VERSION_UNDERSCORE;/CURRENT = $NEXT_VERSION_UNDERSCORE;/g" libs/core/src/main/java/org/opensearch/Version.java
+      - name: Increment Patch Version on Major.Minor branch
+        uses: peternied/opensearch-core-version-updater@v1
+        with:
+          previous-version: ${{ env.CURRENT_VERSION }}
+          new-version: ${{ env.NEXT_VERSION }}
+          update-current: true

-      - name: Create Pull Request
+      - name: Create PR for BASE
+        id: base_pr
         uses: peter-evans/create-pull-request@v5
         with:
-          token: ${{ steps.github_app_token.outputs.token }}
           base: ${{ env.BASE }}
           branch: 'create-pull-request/patch-${{ env.BASE }}'
           commit-message: Increment version to ${{ env.NEXT_VERSION }}
@@ -76,19 +78,18 @@ jobs:
       - uses: actions/checkout@v4
         with:
           ref: ${{ env.BASE_X }}
-          token: ${{ steps.github_app_token.outputs.token }}

-      - name: Add bwc version to .X branch
-        run: |
-          echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
-          sed -i "s/- \"$CURRENT_VERSION\"/\0\n - \"$NEXT_VERSION\"/g" .ci/bwcVersions
-          echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE
-          sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java
+      - name: Add Patch Version on Major.X branch
+        uses: peternied/opensearch-core-version-updater@v1
+        with:
+          previous-version: ${{ env.CURRENT_VERSION }}
+          new-version: ${{ env.NEXT_VERSION }}
+          update-current: false

-      - name: Create Pull Request
+      - name: Create PR for BASE_X
+        id: base_x_pr
         uses: peter-evans/create-pull-request@v5
         with:
-          token: ${{ steps.github_app_token.outputs.token }}
           base: ${{ env.BASE_X }}
           branch: 'create-pull-request/patch-${{ env.BASE_X }}'
           commit-message: Add bwc version ${{ env.NEXT_VERSION }}
@@ -103,19 +104,18 @@ jobs:
       - uses: actions/checkout@v4
         with:
           ref: main
-          token: ${{ steps.github_app_token.outputs.token }}

-      - name: Add bwc version to main branch
-        run: |
-          echo Adding bwc version $NEXT_VERSION after $CURRENT_VERSION
-          sed -i "s/- \"$CURRENT_VERSION\"/\0\n -
\"$NEXT_VERSION\"/g" .ci/bwcVersions - echo Adding $NEXT_VERSION_UNDERSCORE after $CURRENT_VERSION_UNDERSCORE - sed -i "s/public static final Version $CURRENT_VERSION_UNDERSCORE = new Version(\([[:digit:]]\+\)\(.*\));/\0\n public static final Version $NEXT_VERSION_UNDERSCORE = new Version($NEXT_VERSION_ID\2);/g" libs/core/src/main/java/org/opensearch/Version.java + - name: Add Patch Version on main branch + uses: peternied/opensearch-core-version-updater@v1 + with: + previous-version: ${{ env.CURRENT_VERSION }} + new-version: ${{ env.NEXT_VERSION }} + update-current: false - - name: Create Pull Request + - name: Create PR for main + id: main_pr uses: peter-evans/create-pull-request@v5 with: - token: ${{ steps.github_app_token.outputs.token }} base: main branch: 'create-pull-request/patch-main' commit-message: Add bwc version ${{ env.NEXT_VERSION }} @@ -126,3 +126,32 @@ jobs: title: '[AUTO] [main] Add bwc version ${{ env.NEXT_VERSION }}.' body: | I've noticed that a new tag ${{ env.TAG }} was pushed, and added a bwc version ${{ env.NEXT_VERSION }}. + + - name: Create tracking issue + id: create-issue + uses: actions/github-script@v6.4.0 + with: + script: | + const body = ` + ### Description + A new version of OpenSearch was released, to prepare for the next release new version numbers need to be updated in all active branches of development. + + ### Exit Criteria + Review and merged the following pull requests + - [ ] ${{ steps.base_pr.outputs.pull-request-url }} + - [ ] ${{ steps.base_x_pr.outputs.pull-request-url }} + - [ ] ${{ steps.main_pr.outputs.pull-request-url }} + + ### Additional Context + See project wide guidance on branching and versions [[link]](https://github.com/opensearch-project/.github/blob/main/RELEASING.md). + ` + const { data: issue }= await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + labels: ["Build"], + title: "Increment version for ${{ env.NEXT_VERSION }}", + body: body + }); + console.error(JSON.stringify(issue)); + return issue.number; + result-encoding: string From b0eab7579aaf7715c6eea7a3d1e206b7d1cb6760 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 17:07:03 -0800 Subject: [PATCH 114/178] Bump com.google.cloud:google-cloud-core from 2.5.10 to 2.30.0 in /plugins/repository-gcs (#11961) * Bump com.google.cloud:google-cloud-core in /plugins/repository-gcs Bumps com.google.cloud:google-cloud-core from 2.5.10 to 2.30.0. --- updated-dependencies: - dependency-name: com.google.cloud:google-cloud-core dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + plugins/repository-gcs/build.gradle | 2 +- .../repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 | 1 + .../repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 | 1 - 4 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 695e98cebbb38..3ab7ef4f10620 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -178,6 +178,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794)) - Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960)) - Bump `com.diffplug.spotless` from 6.23.2 to 6.24.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962)) +- Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index af6bf38cb065c..e0651da8d39c8 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -66,7 +66,7 @@ dependencies { api "com.google.auth:google-auth-library-credentials:${versions.google_auth}" api "com.google.auth:google-auth-library-oauth2-http:${versions.google_auth}" - api 'com.google.cloud:google-cloud-core:2.5.10' + api 'com.google.cloud:google-cloud-core:2.30.0' api 'com.google.cloud:google-cloud-core-http:2.23.0' api 'com.google.cloud:google-cloud-storage:1.113.1' diff --git a/plugins/repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 new file mode 100644 index 0000000000000..10f8f90df108f --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-2.30.0.jar.sha1 @@ -0,0 +1 @@ +b48ea27cbdccd5f225d8a35ea28e2cd01c25918b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 deleted file mode 100644 index 34c3dc6805500..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-2.5.10.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4d979bfe28551eb78cddae9282833ede147a9331 \ No newline at end of file From 48fdb4f9fe5755f0e1f9beb18aa592fd8d46fc53 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Tue, 23 Jan 2024 08:44:13 -0500 Subject: [PATCH 115/178] Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 (#11968) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- CHANGELOG.md | 1 + .../util/concurrent/OpenSearchExecutors.java | 24 +++++++++++++ 
.../concurrent/OpenSearchExecutorsTests.java | 36 +++++++++++++++++++ 3 files changed, 61 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ab7ef4f10620..26427b013a1bc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -210,6 +210,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526)) - Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890)) - Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877)) +- Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968)) ### Deprecated diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java index c95fb5b1044cc..6e45c3fb7b58d 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java @@ -447,6 +447,30 @@ public boolean offer(E e) { } } + /** + * Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2. + */ + @Override + public void put(E e) { + super.offer(e); + } + + /** + * Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2. + */ + @Override + public boolean offer(E e, long timeout, TimeUnit unit) { + return super.offer(e); + } + + /** + * Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2. + */ + @Override + public boolean add(E e) { + return super.offer(e); + } + } /** diff --git a/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java b/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java index 2063cd26a9e8e..4823ce7a238e3 100644 --- a/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java +++ b/server/src/test/java/org/opensearch/common/util/concurrent/OpenSearchExecutorsTests.java @@ -49,6 +49,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * Tests for OpenSearchExecutors and its components like OpenSearchAbortPolicy. @@ -279,6 +280,41 @@ public void testScaleDown() throws Exception { terminate(pool); } + /** + * The test case is adapted from https://bugs.openjdk.org/browse/JDK-8323659 reproducer. 
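+     * With the JDK-21.0.2 change, LinkedTransferQueue's put/add delegate to the overridable offer, so a task
+     * that re-submits itself from run() can reach the scaling queue's custom offer through calls that must
+     * always enqueue; the put/add/offer(timeout) overrides in OpenSearchExecutors route such calls to the
+     * plain super.offer.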
+ */ + public void testScaleUpWithSpawningTask() throws Exception { + ThreadPoolExecutor pool = OpenSearchExecutors.newScaling( + getClass().getName() + "/" + getTestName(), + 0, + 1, + between(1, 100), + randomTimeUnit(), + OpenSearchExecutors.daemonThreadFactory("test"), + threadContext + ); + assertThat("Min property", pool.getCorePoolSize(), equalTo(0)); + assertThat("Max property", pool.getMaximumPoolSize(), equalTo(1)); + + final CountDownLatch latch = new CountDownLatch(10); + class TestTask implements Runnable { + @Override + public void run() { + latch.countDown(); + if (latch.getCount() > 0) { + pool.execute(TestTask.this); + } + } + } + pool.execute(new TestTask()); + latch.await(); + + assertThat("wrong pool size", pool.getPoolSize(), lessThanOrEqualTo(1)); + assertThat("wrong active size", pool.getActiveCount(), lessThanOrEqualTo(1)); + + terminate(pool); + } + public void testRejectionMessageAndShuttingDownFlag() throws InterruptedException { int pool = between(1, 10); int queue = between(0, 100); From c529265033ae07fbcd935251b1db16f9354867eb Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Wed, 24 Jan 2024 00:17:55 +0800 Subject: [PATCH 116/178] Fix flaky-test for CopyProcessorTests (#11982) Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- .../ingest/common/CopyProcessorTests.java | 30 +++++++++++++++---- 1 file changed, 25 insertions(+), 5 deletions(-) diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java index f271bdd342d0b..3259ba85ef340 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/CopyProcessorTests.java @@ -14,6 +14,9 @@ import org.opensearch.ingest.TestTemplateService; import org.opensearch.test.OpenSearchTestCase; +import java.util.List; +import java.util.Map; + import static org.hamcrest.Matchers.equalTo; public class CopyProcessorTests extends OpenSearchTestCase { @@ -26,8 +29,7 @@ public void testCopyExistingField() throws Exception { processor.execute(ingestDocument); assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); Object sourceValue = ingestDocument.getFieldValue(sourceFieldName, Object.class); - assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue)); - assertThat(ingestDocument.getFieldValue(sourceFieldName, Object.class), equalTo(sourceValue)); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldName, Object.class), sourceValue); Processor processorWithEmptyTarget = createCopyProcessor(sourceFieldName, "", false, false, false); assertThrows( @@ -75,7 +77,7 @@ public void testCopyWithRemoveSource() throws Exception { Processor processor = createCopyProcessor(sourceFieldName, targetFieldName, false, true, false); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); - assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue)); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldName, Object.class), sourceValue); assertThat(ingestDocument.hasField(sourceFieldName), equalTo(false)); } @@ -97,12 +99,30 @@ public void testCopyToExistingField() throws Exception { Processor processorWithTargetNullValue = createCopyProcessor(sourceFieldName, targetFieldWithNullValue, false, false, false); 
processorWithTargetNullValue.execute(ingestDocument); assertThat(ingestDocument.hasField(targetFieldWithNullValue), equalTo(true)); - assertThat(ingestDocument.getFieldValue(targetFieldWithNullValue, Object.class), equalTo(sourceValue)); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldWithNullValue, Object.class), sourceValue); Processor processorWithOverrideTargetIsTrue = createCopyProcessor(sourceFieldName, targetFieldName, false, false, true); processorWithOverrideTargetIsTrue.execute(ingestDocument); assertThat(ingestDocument.hasField(targetFieldName), equalTo(true)); - assertThat(ingestDocument.getFieldValue(targetFieldName, Object.class), equalTo(sourceValue)); + assertDeepCopiedObjectEquals(ingestDocument.getFieldValue(targetFieldName, Object.class), sourceValue); + } + + @SuppressWarnings("unchecked") + private static void assertDeepCopiedObjectEquals(Object expected, Object actual) { + if (expected instanceof Map) { + Map<String, Object> expectedMap = (Map<String, Object>) expected; + Map<String, Object> actualMap = (Map<String, Object>) actual; + assertEquals(expectedMap.size(), actualMap.size()); + for (Map.Entry<String, Object> expectedEntry : expectedMap.entrySet()) { + assertDeepCopiedObjectEquals(expectedEntry.getValue(), actualMap.get(expectedEntry.getKey())); + } + } else if (expected instanceof List) { + assertArrayEquals(((List<?>) expected).toArray(), ((List<?>) actual).toArray()); + } else if (expected instanceof byte[]) { + assertArrayEquals((byte[]) expected, (byte[]) actual); + } else { + assertEquals(expected, actual); + } } private static Processor createCopyProcessor( From 4167ac5c94c57457bd225fe414f51d1d8fe66ed9 Mon Sep 17 00:00:00 2001 From: "Daniel (dB.) Doubrovkine" <dblock@amazon.com> Date: Tue, 23 Jan 2024 12:38:49 -0500 Subject: [PATCH 117/178] Fix: remove unnecessary trailing slash from REST paths. 
(#11992) Signed-off-by: dblock <dblock@amazon.com> --- .../action/admin/cluster/RestRemoteClusterInfoAction.java | 2 +- .../rest/action/admin/indices/RestPutMappingAction.java | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java index 28edba4db387d..138f9fdf5c813 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestRemoteClusterInfoAction.java @@ -53,7 +53,7 @@ public final class RestRemoteClusterInfoAction extends BaseRestHandler { @Override public List<Route> routes() { - return singletonList(new Route(GET, "_remote/info")); + return singletonList(new Route(GET, "/_remote/info")); } @Override diff --git a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java index 8fdf000139d89..0d805f5f3bfb8 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/indices/RestPutMappingAction.java @@ -66,10 +66,10 @@ public class RestPutMappingAction extends BaseRestHandler { public List<Route> routes() { return unmodifiableList( asList( - new Route(POST, "/{index}/_mapping/"), - new Route(PUT, "/{index}/_mapping/"), - new Route(POST, "/{index}/_mappings/"), - new Route(PUT, "/{index}/_mappings/") + new Route(POST, "/{index}/_mapping"), + new Route(PUT, "/{index}/_mapping"), + new Route(POST, "/{index}/_mappings"), + new Route(PUT, "/{index}/_mappings") ) ); } From 79964c28901e337b4b0a31bed13a9b54e0720bd5 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Tue, 23 Jan 2024 15:36:48 -0500 Subject: [PATCH 118/178] OpenJDK Update (January 2024 Patch releases) (#11991) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- .../java/org/opensearch/gradle/test/DistroTestPlugin.java | 4 ++-- buildSrc/version.properties | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java index 1ad7e056b6ae6..bc44f81a81aff 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java @@ -77,9 +77,9 @@ import java.util.stream.Stream; public class DistroTestPlugin implements Plugin<Project> { - private static final String SYSTEM_JDK_VERSION = "17.0.9+9"; + private static final String SYSTEM_JDK_VERSION = "21.0.2+13"; private static final String SYSTEM_JDK_VENDOR = "adoptium"; - private static final String GRADLE_JDK_VERSION = "17.0.9+9"; + private static final String GRADLE_JDK_VERSION = "21.0.2+13"; private static final String GRADLE_JDK_VENDOR = "adoptium"; // all distributions used by distro tests. 
this is temporary until tests are per distribution diff --git a/buildSrc/version.properties b/buildSrc/version.properties index dd7f2e1eaabf0..8f8e445b04437 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -2,7 +2,7 @@ opensearch = 3.0.0 lucene = 9.9.1 bundled_jdk_vendor = adoptium -bundled_jdk = 21.0.1+12 +bundled_jdk = 21.0.2+13 # optional dependencies spatial4j = 0.7 From b61bd1415de4e7b961566a5ec8de6bc207ba34d0 Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Wed, 24 Jan 2024 05:30:04 +0800 Subject: [PATCH 119/178] Fix typo in RemoveProcessor (#11983) * Fix typo in RemoveProcessor Signed-off-by: Gao Binlong <gbinlong@amazon.com> * Modify changelog Signed-off-by: Gao Binlong <gbinlong@amazon.com> --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 2 +- .../java/org/opensearch/ingest/common/RemoveProcessor.java | 4 ++-- .../opensearch/ingest/common/RemoveProcessorFactoryTests.java | 4 ++-- .../org/opensearch/ingest/common/RemoveProcessorTests.java | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 26427b013a1bc..b8dce60ab5ce4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -103,7 +103,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add search query categorizer ([#10255](https://github.com/opensearch-project/OpenSearch/pull/10255)) - Per request phase latency ([#10351](https://github.com/opensearch-project/OpenSearch/issues/10351)) - Add cluster state stats ([#10670](https://github.com/opensearch-project/OpenSearch/pull/10670)) -- Remove ingest processor supports excluding fields ([#10967](https://github.com/opensearch-project/OpenSearch/pull/10967)) +- Remove ingest processor supports excluding fields ([#10967](https://github.com/opensearch-project/OpenSearch/pull/10967), [#11983](https://github.com/opensearch-project/OpenSearch/pull/11983)) - [Tiered caching] Enabling serialization for IndicesRequestCache key object ([#10275](https://github.com/opensearch-project/OpenSearch/pull/10275)) - [Tiered caching] Defining interfaces, listeners and extending IndicesRequestCache with Tiered cache support ([#10753](https://github.com/opensearch-project/OpenSearch/pull/10753)) - [Remote cluster state] Restore cluster state version during remote state auto restore ([#10853](https://github.com/opensearch-project/OpenSearch/pull/10853)) diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java index d01dce02fca31..e6d151aec9be1 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveProcessor.java @@ -72,7 +72,7 @@ public final class RemoveProcessor extends AbstractProcessor { ) { super(tag, description); if (fields == null && excludeFields == null || fields != null && excludeFields != null) { - throw new IllegalArgumentException("ether fields and excludeFields must be set"); + throw new IllegalArgumentException("either fields or excludeFields must be set"); } if (fields != null) { this.fields = new ArrayList<>(fields); @@ -188,7 +188,7 @@ public RemoveProcessor create( final Object excludeField = ConfigurationUtils.readOptionalObject(config, "exclude_field"); if (field == null && excludeField == null || field != null && excludeField != null) { - throw newConfigurationException(TYPE, 
processorTag, "field", "ether field or exclude_field must be set"); + throw newConfigurationException(TYPE, processorTag, "field", "either field or exclude_field must be set"); } boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java index 179aef2feac0c..6332eeafc387c 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorFactoryTests.java @@ -97,13 +97,13 @@ public void testCreateWithExcludeField() throws Exception { OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config) ); - assertThat(exception.getMessage(), equalTo("[field] ether field or exclude_field must be set")); + assertThat(exception.getMessage(), equalTo("[field] either field or exclude_field must be set")); Map<String, Object> config2 = new HashMap<>(); config2.put("field", "field1"); config2.put("exclude_field", "field2"); exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config2)); - assertThat(exception.getMessage(), equalTo("[field] ether field or exclude_field must be set")); + assertThat(exception.getMessage(), equalTo("[field] either field or exclude_field must be set")); Map<String, Object> config6 = new HashMap<>(); config6.put("exclude_field", "exclude_field"); diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java index 78a3d36124d45..7fc1d3f2f0a3c 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveProcessorTests.java @@ -203,7 +203,7 @@ public void testRemoveMetadataField() throws Exception { public void testCreateRemoveProcessorWithBothFieldsAndExcludeFields() throws Exception { assertThrows( - "ether fields and excludeFields must be set", + "either fields or excludeFields must be set", IllegalArgumentException.class, () -> new RemoveProcessor(randomAlphaOfLength(10), null, null, null, false) ); @@ -223,7 +223,7 @@ public void testCreateRemoveProcessorWithBothFieldsAndExcludeFields() throws Exc } assertThrows( - "ether fields and excludeFields must be set", + "either fields or excludeFields must be set", IllegalArgumentException.class, () -> new RemoveProcessor(randomAlphaOfLength(10), null, fields, excludeFields, false) ); From 30aa8bec12220e528d7fd635c78ea4d76c686917 Mon Sep 17 00:00:00 2001 From: Jay Deng <jayd0104@gmail.com> Date: Tue, 23 Jan 2024 15:00:51 -0800 Subject: [PATCH 120/178] Disable index refresh for TermsFixedDocCountErrorIT (#11976) Signed-off-by: Jay Deng <jayd0104@gmail.com> --- .../bucket/TermsFixedDocCountErrorIT.java | 31 +++++++++++++++---- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java index c1c99a9beb689..9f59167fe2d42 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java @@ -23,6 +23,7 @@ import java.util.Collection; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.index.IndexSettings.MINIMUM_REFRESH_INTERVAL; import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.OpenSearchIntegTestCase.Scope.TEST; @@ -71,7 +72,10 @@ public void testSimpleAggErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -89,7 +93,10 @@ public void testSimpleAggErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -127,7 +134,10 @@ public void testSimpleAggErrorSingleShard() throws Exception { assertAcked( prepareCreate("idx_shard_error").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_shard_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -170,7 +180,10 @@ public void testSliceLevelDocCountErrorSingleShard() throws Exception { assertAcked( prepareCreate("idx_slice_error").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_slice_error").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -248,7 +261,10 @@ public void testSliceLevelDocCountErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_1").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + 
.put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_1").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); @@ -288,7 +304,10 @@ public void testSliceLevelDocCountErrorMultiShard() throws Exception { assertAcked( prepareCreate("idx_mshard_2").setMapping(STRING_FIELD_NAME, "type=keyword") .setSettings( - Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put("index.refresh_interval", MINIMUM_REFRESH_INTERVAL) ) ); client().prepareIndex("idx_mshard_2").setSource(jsonBuilder().startObject().field(STRING_FIELD_NAME, "A").endObject()).get(); From 7da8628323427e513a102beb1252ef60e1957443 Mon Sep 17 00:00:00 2001 From: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> Date: Wed, 24 Jan 2024 05:58:53 -0800 Subject: [PATCH 121/178] [Searchable Snapshot] Add Relevant Error handling for Restore API with remote_snapshot. (#11840) * Add Relevant Error handling for Restore API with remote_snapshot. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * update comment with more relevant info. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * apply spotless check. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Add null check Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * apply spotless check. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * fix error message in test. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * apply spotless check. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> --------- Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> Signed-off-by: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> --- .../snapshots/SearchableSnapshotIT.java | 42 +++++++++++++++++++ .../opensearch/snapshots/RestoreService.java | 11 +++++ 2 files changed, 53 insertions(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java index cb050e17cb237..21554a8e4fb15 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java @@ -60,6 +60,7 @@ import static org.opensearch.action.admin.cluster.node.stats.NodesStatsRequest.Metric.FS; import static org.opensearch.core.common.util.CollectionUtils.iterableAsArrayList; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; @@ -789,6 +790,47 @@ public void testDefaultShardPreference() throws Exception { } } + public void testRestoreSearchableSnapshotWithIndexStoreTypeThrowsException() throws Exception { + final String snapshotName = "test-snap"; + final String repoName = "test-repo"; + final String indexName1 = "test-idx-1"; + final int numReplicasIndex1 = randomIntBetween(1, 4); + final Client client = client(); + + internalCluster().ensureAtLeastNumDataNodes(numReplicasIndex1 + 1); + createIndexWithDocsAndEnsureGreen(numReplicasIndex1, 100, indexName1); + + createRepositoryWithSettings(null, repoName); + takeSnapshot(client,
snapshotName, repoName, indexName1); + deleteIndicesAndEnsureGreen(client, indexName1); + + internalCluster().ensureAtLeastNumSearchNodes(numReplicasIndex1 + 1); + + // set "index.store.type" to "remote_snapshot" in index settings of restore API and assert appropriate exception with error message + // is thrown. + final SnapshotRestoreException error = expectThrows( + SnapshotRestoreException.class, + () -> client.admin() + .cluster() + .prepareRestoreSnapshot(repoName, snapshotName) + .setRenamePattern("(.+)") + .setRenameReplacement("$1-copy") + .setIndexSettings( + Settings.builder() + .put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT) + ) + .setWaitForCompletion(true) + .execute() + .actionGet() + ); + assertThat( + error.getMessage(), + containsString( + "cannot restore remote snapshot with index settings \"index.store.type\" set to \"remote_snapshot\". Instead use \"storage_type\": \"remote_snapshot\" as argument to restore." + ) + ); + } + /** * Asserts the cache folder count to match the number of shards and the number of indices within the cache folder * as provided. diff --git a/server/src/main/java/org/opensearch/snapshots/RestoreService.java b/server/src/main/java/org/opensearch/snapshots/RestoreService.java index 9d2c7eb882fa1..bf2c7fc74be92 100644 --- a/server/src/main/java/org/opensearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/opensearch/snapshots/RestoreService.java @@ -121,6 +121,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_VERSION_UPGRADED; import static org.opensearch.common.util.FeatureFlags.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY; import static org.opensearch.common.util.set.Sets.newHashSet; +import static org.opensearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.opensearch.index.store.remote.directory.RemoteSnapshotDirectory.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY_MINIMUM_VERSION; import static org.opensearch.index.store.remote.filecache.FileCache.DATA_TO_FILE_CACHE_SIZE_RATIO_SETTING; import static org.opensearch.node.Node.NODE_SEARCH_CACHE_SIZE_SETTING; @@ -226,6 +227,16 @@ public RestoreService( */ public void restoreSnapshot(final RestoreSnapshotRequest request, final ActionListener<RestoreCompletionResponse> listener) { try { + // Setting INDEX_STORE_TYPE_SETTING as REMOTE_SNAPSHOT is intended to be a system-managed index setting that is configured when + // restoring a snapshot and should not be manually set by user. + String storeTypeSetting = request.indexSettings().get(INDEX_STORE_TYPE_SETTING.getKey()); + if (storeTypeSetting != null && storeTypeSetting.equals(RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT.toString())) { + throw new SnapshotRestoreException( + request.repository(), + request.snapshot(), + "cannot restore remote snapshot with index settings \"index.store.type\" set to \"remote_snapshot\". Instead use \"storage_type\": \"remote_snapshot\" as argument to restore." 
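// Editor's note: an illustrative sketch, not part of this patch. The message above steers
// callers to the supported path: restore a snapshot as a searchable snapshot by sending
// "storage_type" on the restore request itself rather than setting "index.store.type".
// A minimal client-side call, assuming the request builder exposes setStorageType(...)
// as the mirror of that REST argument:
//
//     RestoreSnapshotResponse restored = client.admin()
//         .cluster()
//         .prepareRestoreSnapshot("test-repo", "test-snap")
//         .setRenamePattern("(.+)")
//         .setRenameReplacement("$1-copy")
//         .setStorageType(RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT)
//         .setWaitForCompletion(true)
//         .get();
//
// Built this way, the request carries no user-supplied "index.store.type" and the new
// guard stays silent.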
+ ); + } // Read snapshot info and metadata from the repository final String repositoryName = request.repository(); Repository repository = repositoriesService.repository(repositoryName); From b1f8b169c2f195012a08cfc6d79ef5e5a3c7f885 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Wed, 24 Jan 2024 12:10:18 -0500 Subject: [PATCH 122/178] Fix test cluster behavior @ClusterScope(scope = Scope.SUITE) and @OpenSearchIntegTestCase.SuiteScopeTestCase for parameterized test cases (#12000) * Fix test cluster behavior @ClusterScope(scope = Scope.SUITE) and @OpenSearchIntegTestCase.SuiteScopeTestCase for parameterized test cases Signed-off-by: Andriy Redko <andriy.redko@aiven.io> * Address code review comments Signed-off-by: Andriy Redko <andriy.redko@aiven.io> * Address code review comments Signed-off-by: Andriy Redko <andriy.redko@aiven.io> * Added test cases for all supported test cluster scopes Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --------- Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- CHANGELOG.md | 2 +- .../test/OpenSearchTestClusterRule.java | 39 ++++++++++++-- ...edDynamicSettingsOpenSearchIntegTests.java | 49 +++++++++++++++++ ...zedStaticSettingsOpenSearchIntegTests.java | 53 +++++++++++++++++++ ...edDynamicSettingsOpenSearchIntegTests.java | 50 +++++++++++++++++ ...zedStaticSettingsOpenSearchIntegTests.java | 53 +++++++++++++++++++ ...edDynamicSettingsOpenSearchIntegTests.java | 50 +++++++++++++++++ ...zedStaticSettingsOpenSearchIntegTests.java | 53 +++++++++++++++++++ 8 files changed, 344 insertions(+), 5 deletions(-) create mode 100644 test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java create mode 100644 test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java create mode 100644 test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java create mode 100644 test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java create mode 100644 test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java create mode 100644 test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index b8dce60ab5ce4..0f5804ed3e632 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -209,7 +209,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Use slice_size == shard_size heuristic in terms aggs for concurrent segment search and properly calculate the doc_count_error ([#11732](https://github.com/opensearch-project/OpenSearch/pull/11732)) - Added Support for dynamically adding SearchRequestOperationsListeners with SearchRequestOperationsCompositeListenerFactory ([#11526](https://github.com/opensearch-project/OpenSearch/pull/11526)) - Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890)) -- Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877)) +- Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877)), 
([#12000](https://github.com/opensearch-project/OpenSearch/pull/12000)) - Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968)) ### Deprecated diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java index dcdca8ab35dfc..57e9ccf22ab43 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestClusterRule.java @@ -56,6 +56,8 @@ * on the way cluster settings are being managed. */ class OpenSearchTestClusterRule implements MethodRule { + // Maps each TestCluster instance to the exact test suite instance that triggered its creation + private final Map<TestCluster, OpenSearchIntegTestCase> suites = new IdentityHashMap<>(); private final Map<Class<?>, TestCluster> clusters = new IdentityHashMap<>(); private final Logger logger = LogManager.getLogger(getClass()); @@ -86,7 +88,13 @@ void afterClass() throws Exception { printTestMessage("cleaning up after"); afterInternal(true, null); OpenSearchTestCase.checkStaticState(true); - clusters.remove(getTestClass()); + synchronized (clusters) { + final TestCluster cluster = clusters.remove(getTestClass()); + IOUtils.closeWhileHandlingException(cluster); + if (cluster != null) { + suites.remove(cluster); + } + } } StrictCheckSpanProcessor.validateTracingStateOnShutdown(); } finally { @@ -226,8 +234,11 @@ private static boolean isSuiteScopedTest(Class<?> clazz) { return clazz.getAnnotation(SuiteScopeTestCase.class) != null; } - private boolean hasParametersChanged(final ParameterizedOpenSearchIntegTestCase target) { - return !((ParameterizedOpenSearchIntegTestCase) suiteInstance).hasSameParametersAs(target); + private static boolean hasParametersChanged( + final ParameterizedOpenSearchIntegTestCase instance, + final ParameterizedOpenSearchIntegTestCase target + ) { + return !instance.hasSameParametersAs(target); } private boolean runTestScopeLifecycle() { @@ -242,8 +253,24 @@ private TestCluster buildAndPutCluster(Scope currentClusterScope, long seed, Ope clearClusters(); // all leftovers are gone by now... 
this is really just a double safety if we miss something somewhere switch (currentClusterScope) { case SUITE: + if (testCluster != null && target instanceof ParameterizedOpenSearchIntegTestCase) { + final OpenSearchIntegTestCase instance = suites.get(testCluster); + if (instance != null) { + assert instance instanceof ParameterizedOpenSearchIntegTestCase; + if (hasParametersChanged( + (ParameterizedOpenSearchIntegTestCase) instance, + (ParameterizedOpenSearchIntegTestCase) target + )) { + IOUtils.closeWhileHandlingException(testCluster); + printTestMessage("new instance of parameterized test class, recreating test cluster for suite"); + testCluster = null; + } + } + } + if (testCluster == null) { // only build if it's not there yet testCluster = buildWithPrivateContext(currentClusterScope, seed, target); + suites.put(testCluster, target); } break; case TEST: @@ -310,6 +337,7 @@ private void clearClusters() throws Exception { synchronized (clusters) { if (!clusters.isEmpty()) { IOUtils.close(clusters.values()); + suites.clear(); clusters.clear(); } } @@ -363,7 +391,10 @@ private void initializeSuiteScope(OpenSearchIntegTestCase target, FrameworkMetho // Catching the case when parameterized test cases are run: the test class stays the same but the test instances changes. if (target instanceof ParameterizedOpenSearchIntegTestCase) { assert suiteInstance instanceof ParameterizedOpenSearchIntegTestCase; - if (hasParametersChanged((ParameterizedOpenSearchIntegTestCase) target)) { + if (hasParametersChanged( + (ParameterizedOpenSearchIntegTestCase) suiteInstance, + (ParameterizedOpenSearchIntegTestCase) target + )) { printTestMessage("new instance of parameterized test class, recreating cluster scope", method); afterClass(); beforeClass(); diff --git a/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..4b864d820d9ec --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +public class ParameterizedDynamicSettingsOpenSearchIntegTests extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { + public ParameterizedDynamicSettingsOpenSearchIntegTests(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSettings() throws IOException { + final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(cluster.getState().getMetadata().settings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..76e05d55a854b --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java @@ -0,0 +1,53 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +public class ParameterizedStaticSettingsOpenSearchIntegTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + + public ParameterizedStaticSettingsOpenSearchIntegTests(Settings staticSettings) { + super(staticSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSettings() throws IOException { + final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); + for (final NodeInfo node : nodes.getNodes()) { + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(node.getSettings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..e96eaec6e85a3 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -0,0 +1,50 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { + public SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSettings() throws IOException { + final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(cluster.getState().getMetadata().settings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..093be04d556b0 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java @@ -0,0 +1,53 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + public SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests(Settings staticSettings) { + super(staticSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSettings() throws IOException { + final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); + for (final NodeInfo node : nodes.getNodes()) { + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(node.getSettings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..bd271b93ba674 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -0,0 +1,50 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) +public class TestScopedParameterizedDynamicSettingsOpenSearchIntegTests extends ParameterizedDynamicSettingsOpenSearchIntegTestCase { + public TestScopedParameterizedDynamicSettingsOpenSearchIntegTests(Settings dynamicSettings) { + super(dynamicSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSettings() throws IOException { + final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(cluster.getState().getMetadata().settings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } +} diff --git a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java new file mode 100644 index 0000000000000..014f96657ba03 --- /dev/null +++ b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java @@ -0,0 +1,53 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.test; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.opensearch.action.admin.cluster.node.info.NodeInfo; +import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST) +public class TestScopedParameterizedStaticSettingsOpenSearchIntegTests extends ParameterizedStaticSettingsOpenSearchIntegTestCase { + public TestScopedParameterizedStaticSettingsOpenSearchIntegTests(Settings staticSettings) { + super(staticSettings); + } + + @ParametersFactory + public static Collection<Object[]> parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } + + public void testSettings() throws IOException { + final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); + for (final NodeInfo node : nodes.getNodes()) { + assertThat( + CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(node.getSettings()), + equalTo(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settings)) + ); + } + } +} From 76f7e73cffab0d0fbfb4bb0f24affd17b078d50a Mon Sep 17 00:00:00 2001 From: Sarat Vemulapalli <vemulapallisarat@gmail.com> Date: Wed, 24 Jan 2024 12:36:22 -0800 Subject: [PATCH 123/178] Fixing Minio docker compose fixture (#12014) Signed-off-by: Sarat Vemulapalli <vemsarat@amazon.com> --- test/fixtures/minio-fixture/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/fixtures/minio-fixture/docker-compose.yml b/test/fixtures/minio-fixture/docker-compose.yml index e4d2faab9a657..539ca9471fa04 100644 --- a/test/fixtures/minio-fixture/docker-compose.yml +++ b/test/fixtures/minio-fixture/docker-compose.yml @@ -1,4 +1,4 @@ -version: '3' +version: '3.2' services: minio-fixture: build: From 829215c4a4660ee65026c40a0a01ebe100b3ee2c Mon Sep 17 00:00:00 2001 From: Prabhat <20185657+CaptainDredge@users.noreply.github.com> Date: Wed, 24 Jan 2024 13:38:23 -0800 Subject: [PATCH 124/178] Added New DateTime parser implementation (#11465) Signed-off-by: Prabhat Sharma <ptsharma@amazon.com> Co-authored-by: Prabhat Sharma <ptsharma@amazon.com> --- CHANGELOG.md | 1 + NOTICE.txt | 3 + .../common/time/DateFormatters.java | 37 ++ .../opensearch/common/time/FormatNames.java | 1 + .../common/time/JavaDateFormatter.java | 90 ++-- .../time/OpenSearchDateTimeFormatter.java | 85 ++++ .../time/OpenSearchDateTimePrinter.java | 30 ++ .../RFC3339CompatibleDateTimeFormatter.java | 428 ++++++++++++++++++ .../joda/JavaJodaTimeDuellingTests.java | 4 +- .../common/time/DateFormattersTests.java | 154 ++++++- 10 files changed, 796 insertions(+), 37 deletions(-) create mode 100644 server/src/main/java/org/opensearch/common/time/OpenSearchDateTimeFormatter.java create mode 100644 
server/src/main/java/org/opensearch/common/time/OpenSearchDateTimePrinter.java create mode 100644 server/src/main/java/org/opensearch/common/time/RFC3339CompatibleDateTimeFormatter.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 0f5804ed3e632..aa7675edb8cc4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -133,6 +133,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) - Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563)) - Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([11876](https://github.com/opensearch-project/OpenSearch/pull/11876)) +- New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/NOTICE.txt b/NOTICE.txt index 6c7dc983f8c7a..d463b8f28561f 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -10,3 +10,6 @@ Foundation (http://www.apache.org/). This product includes software developed by Joda.org (http://www.joda.org/). + +This product includes software developed by +Morten Haraldsen (ethlo) (https://github.com/ethlo) under the Apache License, version 2.0. diff --git a/server/src/main/java/org/opensearch/common/time/DateFormatters.java b/server/src/main/java/org/opensearch/common/time/DateFormatters.java index e74ab687b903b..527dce7677dd8 100644 --- a/server/src/main/java/org/opensearch/common/time/DateFormatters.java +++ b/server/src/main/java/org/opensearch/common/time/DateFormatters.java @@ -1299,6 +1299,41 @@ public class DateFormatters { .withResolverStyle(ResolverStyle.STRICT) ); + /** + * Returns RFC 3339 a popular ISO 8601 profile compatible date time formatter and parser. 
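 * <p>
 * Editor's illustration, not part of the original patch: once the {@code rfc3339_lenient} name is
 * registered in {@code FormatNames} (see the change further below), the formatter is reachable
 * through the usual pattern lookup, e.g.
 * <pre>
 *     DateFormatter formatter = DateFormatter.forPattern("rfc3339_lenient");
 *     TemporalAccessor parsed = formatter.parse("1997-07-16T19:20:30.45+01:00");
 * </pre>
 * Printing delegates to {@code STRICT_DATE_OPTIONAL_TIME_PRINTER}, so the leniency applies to
 * parsing only.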
* The format is not fully compatible with the existing spec; it is more lenient and closely follows the W3C note on datetime + */ + + public static final DateFormatter RFC3339_LENIENT_DATE_FORMATTER = new JavaDateFormatter( + "rfc3339_lenient", + new OpenSearchDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_PRINTER), + new RFC3339CompatibleDateTimeFormatter( + new DateTimeFormatterBuilder().append(DATE_FORMATTER) + .optionalStart() + .appendLiteral('T') + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalStart() + .appendFraction(NANO_OF_SECOND, 1, 9, true) + .optionalEnd() + .optionalStart() + .appendLiteral(',') + .appendFraction(NANO_OF_SECOND, 1, 9, false) + .optionalEnd() + .optionalStart() + .appendOffsetId() + .optionalEnd() + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT) + .withResolverStyle(ResolverStyle.STRICT) + ) + ); + private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder().append(HOUR_MINUTE_FORMATTER) .appendLiteral(":") .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) @@ -2152,6 +2187,8 @@ static DateFormatter forPattern(String input) { return STRICT_YEAR_MONTH; } else if (FormatNames.STRICT_YEAR_MONTH_DAY.matches(input)) { return STRICT_YEAR_MONTH_DAY; + } else if (FormatNames.RFC3339_LENIENT.matches(input)) { + return RFC3339_LENIENT_DATE_FORMATTER; } else { try { return new JavaDateFormatter( diff --git a/server/src/main/java/org/opensearch/common/time/FormatNames.java b/server/src/main/java/org/opensearch/common/time/FormatNames.java index ba0a8fcf4a17a..ec5e825fc933e 100644 --- a/server/src/main/java/org/opensearch/common/time/FormatNames.java +++ b/server/src/main/java/org/opensearch/common/time/FormatNames.java @@ -44,6 +44,7 @@ */ public enum FormatNames { ISO8601(null, "iso8601"), + RFC3339_LENIENT(null, "rfc3339_lenient"), BASIC_DATE("basicDate", "basic_date"), BASIC_DATE_TIME("basicDateTime", "basic_date_time"), BASIC_DATE_TIME_NO_MILLIS("basicDateTimeNoMillis", "basic_date_time_no_millis"), diff --git a/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java b/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java index f711b14aeb928..033ea280e6172 100644 --- a/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java +++ b/server/src/main/java/org/opensearch/common/time/JavaDateFormatter.java @@ -36,6 +36,7 @@ import org.opensearch.core.common.Strings; import java.text.ParsePosition; +import java.time.DateTimeException; import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; @@ -52,7 +53,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.BiConsumer; import java.util.stream.Collectors; @@ -70,11 +70,11 @@ class JavaDateFormatter implements DateFormatter { private final String format; private final String printFormat; - private final DateTimeFormatter printer; - private final List<DateTimeFormatter> parsers; + private final OpenSearchDateTimePrinter printer; + private final List<OpenSearchDateTimeFormatter> parsers; private final JavaDateFormatter roundupParser; private final Boolean canCacheLastParsedFormatter; - private volatile DateTimeFormatter lastParsedformatter = null; + private volatile
OpenSearchDateTimeFormatter lastParsedformatter = null; /** * A round up formatter @@ -83,11 +83,11 @@ class JavaDateFormatter implements DateFormatter { */ static class RoundUpFormatter extends JavaDateFormatter { - RoundUpFormatter(String format, List<DateTimeFormatter> roundUpParsers) { + RoundUpFormatter(String format, List<OpenSearchDateTimeFormatter> roundUpParsers) { super(format, firstFrom(roundUpParsers), null, roundUpParsers); } - private static DateTimeFormatter firstFrom(List<DateTimeFormatter> roundUpParsers) { + private static OpenSearchDateTimeFormatter firstFrom(List<OpenSearchDateTimeFormatter> roundUpParsers) { return roundUpParsers.get(0); } @@ -101,14 +101,18 @@ JavaDateFormatter getRoundupParser() { JavaDateFormatter( String format, String printFormat, - DateTimeFormatter printer, + OpenSearchDateTimePrinter printer, Boolean canCacheLastParsedFormatter, - DateTimeFormatter... parsers + OpenSearchDateTimeFormatter... parsers ) { this(format, printFormat, printer, ROUND_UP_BASE_FIELDS, canCacheLastParsedFormatter, parsers); } JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeFormatter... parsers) { + this(format, format, wrapFormatter(printer), false, wrapAllFormatters(parsers)); + } + + JavaDateFormatter(String format, OpenSearchDateTimePrinter printer, OpenSearchDateTimeFormatter... parsers) { this(format, format, printer, false, parsers); } @@ -127,19 +131,19 @@ JavaDateFormatter getRoundupParser() { JavaDateFormatter( String format, String printFormat, - DateTimeFormatter printer, + OpenSearchDateTimePrinter printer, BiConsumer<DateTimeFormatterBuilder, DateTimeFormatter> roundupParserConsumer, Boolean canCacheLastParsedFormatter, - DateTimeFormatter... parsers + OpenSearchDateTimeFormatter... parsers ) { if (printer == null) { throw new IllegalArgumentException("printer may not be null"); } - long distinctZones = Arrays.stream(parsers).map(DateTimeFormatter::getZone).distinct().count(); + long distinctZones = Arrays.stream(parsers).map(OpenSearchDateTimeFormatter::getZone).distinct().count(); if (distinctZones > 1) { throw new IllegalArgumentException("formatters must have the same time zone"); } - long distinctLocales = Arrays.stream(parsers).map(DateTimeFormatter::getLocale).distinct().count(); + long distinctLocales = Arrays.stream(parsers).map(OpenSearchDateTimeFormatter::getLocale).distinct().count(); if (distinctLocales > 1) { throw new IllegalArgumentException("formatters must have the same locale"); } @@ -149,12 +153,12 @@ JavaDateFormatter getRoundupParser() { this.canCacheLastParsedFormatter = canCacheLastParsedFormatter; if (parsers.length == 0) { - this.parsers = Collections.singletonList(printer); + this.parsers = Collections.singletonList((OpenSearchDateTimeFormatter) printer); } else { this.parsers = Arrays.asList(parsers); } List<DateTimeFormatter> roundUp = createRoundUpParser(format, roundupParserConsumer); - this.roundupParser = new RoundUpFormatter(format, roundUp); + this.roundupParser = new RoundUpFormatter(format, wrapAllFormatters(roundUp)); } JavaDateFormatter( @@ -163,7 +167,7 @@ JavaDateFormatter getRoundupParser() { BiConsumer<DateTimeFormatterBuilder, DateTimeFormatter> roundupParserConsumer, DateTimeFormatter... 
parsers ) { - this(format, format, printer, roundupParserConsumer, false, parsers); + this(format, format, wrapFormatter(printer), roundupParserConsumer, false, wrapAllFormatters(parsers)); } /** @@ -181,7 +185,8 @@ private List<DateTimeFormatter> createRoundUpParser( ) { if (format.contains("||") == false) { List<DateTimeFormatter> roundUpParsers = new ArrayList<>(); - for (DateTimeFormatter parser : this.parsers) { + for (OpenSearchDateTimeFormatter customparser : this.parsers) { + DateTimeFormatter parser = customparser.getFormatter(); DateTimeFormatterBuilder builder = new DateTimeFormatterBuilder(); builder.append(parser); roundupParserConsumer.accept(builder, parser); @@ -201,12 +206,12 @@ public static DateFormatter combined( assert formatters.size() > 0; assert printFormatter != null; - List<DateTimeFormatter> parsers = new ArrayList<>(formatters.size()); - List<DateTimeFormatter> roundUpParsers = new ArrayList<>(formatters.size()); + List<OpenSearchDateTimeFormatter> parsers = new ArrayList<>(formatters.size()); + List<OpenSearchDateTimeFormatter> roundUpParsers = new ArrayList<>(formatters.size()); assert printFormatter instanceof JavaDateFormatter; JavaDateFormatter javaPrintFormatter = (JavaDateFormatter) printFormatter; - DateTimeFormatter printer = javaPrintFormatter.getPrinter(); + OpenSearchDateTimePrinter printer = javaPrintFormatter.getPrinter(); for (DateFormatter formatter : formatters) { assert formatter instanceof JavaDateFormatter; JavaDateFormatter javaDateFormatter = (JavaDateFormatter) formatter; @@ -227,9 +232,9 @@ public static DateFormatter combined( private JavaDateFormatter( String format, String printFormat, - DateTimeFormatter printer, - List<DateTimeFormatter> roundUpParsers, - List<DateTimeFormatter> parsers, + OpenSearchDateTimePrinter printer, + List<OpenSearchDateTimeFormatter> roundUpParsers, + List<OpenSearchDateTimeFormatter> parsers, Boolean canCacheLastParsedFormatter ) { this.format = format; @@ -245,6 +250,15 @@ private JavaDateFormatter( DateTimeFormatter printer, List<DateTimeFormatter> roundUpParsers, List<DateTimeFormatter> parsers + ) { + this(format, format, wrapFormatter(printer), wrapAllFormatters(roundUpParsers), wrapAllFormatters(parsers), false); + } + + private JavaDateFormatter( + String format, + OpenSearchDateTimePrinter printer, + List<OpenSearchDateTimeFormatter> roundUpParsers, + List<OpenSearchDateTimeFormatter> parsers ) { this(format, format, printer, roundUpParsers, parsers, false); } @@ -253,7 +267,7 @@ JavaDateFormatter getRoundupParser() { return roundupParser; } - DateTimeFormatter getPrinter() { + OpenSearchDateTimePrinter getPrinter() { return printer; } @@ -265,7 +279,7 @@ public TemporalAccessor parse(String input) { try { return doParse(input); - } catch (DateTimeParseException e) { + } catch (DateTimeException e) { throw new IllegalArgumentException("failed to parse date field [" + input + "] with format [" + format + "]", e); } } @@ -289,14 +303,14 @@ private TemporalAccessor doParse(String input) { Object object = null; if (canCacheLastParsedFormatter && lastParsedformatter != null) { ParsePosition pos = new ParsePosition(0); - object = lastParsedformatter.toFormat().parseObject(input, pos); + object = lastParsedformatter.parseObject(input, pos); if (parsingSucceeded(object, input, pos)) { return (TemporalAccessor) object; } } - for (DateTimeFormatter formatter : parsers) { + for (OpenSearchDateTimeFormatter formatter : parsers) { ParsePosition pos = new ParsePosition(0); - object = 
formatter.toFormat().parseObject(input, pos); + object = formatter.parseObject(input, pos); if (parsingSucceeded(object, input, pos)) { lastParsedformatter = formatter; return (TemporalAccessor) object; @@ -312,16 +326,28 @@ private boolean parsingSucceeded(Object object, String input, ParsePosition pos) return object != null && pos.getIndex() == input.length(); } + private static OpenSearchDateTimeFormatter wrapFormatter(DateTimeFormatter formatter) { + return new OpenSearchDateTimeFormatter(formatter); + } + + private static OpenSearchDateTimeFormatter[] wrapAllFormatters(DateTimeFormatter... formatters) { + return Arrays.stream(formatters).map(JavaDateFormatter::wrapFormatter).toArray(OpenSearchDateTimeFormatter[]::new); + } + + private static List<OpenSearchDateTimeFormatter> wrapAllFormatters(List<DateTimeFormatter> formatters) { + return formatters.stream().map(JavaDateFormatter::wrapFormatter).collect(Collectors.toList()); + } + @Override public DateFormatter withZone(ZoneId zoneId) { // shortcurt to not create new objects unnecessarily if (zoneId.equals(zone())) { return this; } - List<DateTimeFormatter> parsers = new CopyOnWriteArrayList<>( + List<OpenSearchDateTimeFormatter> parsers = new ArrayList<>( this.parsers.stream().map(p -> p.withZone(zoneId)).collect(Collectors.toList()) ); - List<DateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() + List<OpenSearchDateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() .stream() .map(p -> p.withZone(zoneId)) .collect(Collectors.toList()); @@ -334,10 +360,10 @@ public DateFormatter withLocale(Locale locale) { if (locale.equals(locale())) { return this; } - List<DateTimeFormatter> parsers = new CopyOnWriteArrayList<>( + List<OpenSearchDateTimeFormatter> parsers = new ArrayList<>( this.parsers.stream().map(p -> p.withLocale(locale)).collect(Collectors.toList()) ); - List<DateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() + List<OpenSearchDateTimeFormatter> roundUpParsers = this.roundupParser.getParsers() .stream() .map(p -> p.withLocale(locale)) .collect(Collectors.toList()); @@ -396,7 +422,7 @@ public String toString() { return String.format(Locale.ROOT, "format[%s] locale[%s]", format, locale()); } - Collection<DateTimeFormatter> getParsers() { + Collection<OpenSearchDateTimeFormatter> getParsers() { return parsers; } } diff --git a/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimeFormatter.java b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimeFormatter.java new file mode 100644 index 0000000000000..3a629d8843949 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimeFormatter.java @@ -0,0 +1,85 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.time; + +import java.text.Format; +import java.text.ParsePosition; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalQuery; +import java.util.Locale; + +/** +* Wrapper class for DateTimeFormatter{@link java.time.format.DateTimeFormatter} +* to allow for custom implementations for datetime parsing/formatting + */ +class OpenSearchDateTimeFormatter implements OpenSearchDateTimePrinter { + private final DateTimeFormatter formatter; + + public OpenSearchDateTimeFormatter(String pattern) { + this.formatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + } + + public OpenSearchDateTimeFormatter(String pattern, Locale locale) { + this.formatter = DateTimeFormatter.ofPattern(pattern, locale); + } + + public OpenSearchDateTimeFormatter(DateTimeFormatter formatter) { + this.formatter = formatter; + } + + public OpenSearchDateTimeFormatter withLocale(Locale locale) { + return new OpenSearchDateTimeFormatter(getFormatter().withLocale(locale)); + } + + public OpenSearchDateTimeFormatter withZone(ZoneId zoneId) { + return new OpenSearchDateTimeFormatter(getFormatter().withZone(zoneId)); + } + + public String format(TemporalAccessor temporal) { + return this.getFormatter().format(temporal); + } + + public TemporalAccessor parse(CharSequence text, ParsePosition position) { + return this.getFormatter().parse(text, position); + } + + public TemporalAccessor parse(CharSequence text) { + return this.getFormatter().parse(text); + } + + public <T> T parse(CharSequence text, TemporalQuery<T> query) { + return this.getFormatter().parse(text, query); + } + + public ZoneId getZone() { + return this.getFormatter().getZone(); + } + + public Locale getLocale() { + return this.getFormatter().getLocale(); + } + + public TemporalAccessor parse(String input) { + return formatter.parse(input); + } + + public DateTimeFormatter getFormatter() { + return formatter; + } + + public Format toFormat() { + return getFormatter().toFormat(); + } + + public Object parseObject(String text, ParsePosition pos) { + return getFormatter().toFormat().parseObject(text, pos); + } +} diff --git a/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimePrinter.java b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimePrinter.java new file mode 100644 index 0000000000000..350bae21b22b1 --- /dev/null +++ b/server/src/main/java/org/opensearch/common/time/OpenSearchDateTimePrinter.java @@ -0,0 +1,30 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.common.time; + +import java.time.ZoneId; +import java.time.temporal.TemporalAccessor; +import java.util.Locale; + +/** + * Interface for DateTimeFormatter{@link java.time.format.DateTimeFormatter} + * to allow for custom implementations for datetime formatting + */ +interface OpenSearchDateTimePrinter { + + public OpenSearchDateTimePrinter withLocale(Locale locale); + + public OpenSearchDateTimePrinter withZone(ZoneId zoneId); + + public String format(TemporalAccessor temporal); + + public Locale getLocale(); + + public ZoneId getZone(); +} diff --git a/server/src/main/java/org/opensearch/common/time/RFC3339CompatibleDateTimeFormatter.java b/server/src/main/java/org/opensearch/common/time/RFC3339CompatibleDateTimeFormatter.java new file mode 100644 index 0000000000000..98b87efd2380b --- /dev/null +++ b/server/src/main/java/org/opensearch/common/time/RFC3339CompatibleDateTimeFormatter.java @@ -0,0 +1,428 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Based on code from the Internet Time Utility project (https://github.com/ethlo/itu) under the Apache License, version 2.0. + * Copyright (C) 2017 Morten Haraldsen (ethlo) + * Modifications (C) OpenSearch Contributors. All Rights Reserved. + */ + +package org.opensearch.common.time; + +import java.text.ParsePosition; +import java.time.DateTimeException; +import java.time.OffsetDateTime; +import java.time.ZoneId; +import java.time.ZoneOffset; +import java.time.format.DateTimeParseException; +import java.time.temporal.TemporalAccessor; +import java.util.Arrays; +import java.util.Locale; + +/** + * Defines a close profile of RFC3339 datetime format where the date is mandatory and the time is optional. + * <p> + * The returned formatter can only be used for parsing, printing is unsupported. + * <p> + * This parser can parse zoned datetimes. + * The parser is strict by default, thus time string {@code 24:00} cannot be parsed. + * <p> + * It accepts formats described by the following syntax: + * <pre> + * Year: + * YYYY (eg 1997) + * Year and month: + * YYYY-MM (eg 1997-07) + * Complete date: + * YYYY-MM-DD (eg 1997-07-16) + * Complete date plus hours and minutes: + * YYYY-MM-DDThh:mmTZD (eg 1997-07-16T19:20+01:00) + * Complete date plus hours, minutes and seconds: + * YYYY-MM-DDThh:mm:ssTZD (eg 1997-07-16T19:20:30+01:00) + * Complete date plus hours, minutes, seconds and a decimal fraction of a second + * YYYY-MM-DDThh:mm:ss.sTZD (eg 1997-07-16T19:20:30.45+01:00) + * YYYY-MM-DDThh:mm:ss,sTZD (eg 1997-07-16T19:20:30,45+01:00) + * where: + * + * YYYY = four-digit year + * MM = two-digit month (01=January, etc.) 
+ * DD = two-digit day of month (01 through 31) + * hh = two digits of hour (00 through 23) (am/pm NOT allowed) + * mm = two digits of minute (00 through 59) + * ss = two digits of second (00 through 59) + * s = one or more(max 9) digits representing a decimal fraction of a second + * TZD = time zone designator (Z or z or +hh:mm or -hh:mm) + * </pre> + */ +final class RFC3339CompatibleDateTimeFormatter extends OpenSearchDateTimeFormatter { + public static final char DATE_SEPARATOR = '-'; + public static final char TIME_SEPARATOR = ':'; + public static final char SEPARATOR_UPPER = 'T'; + private static final char PLUS = '+'; + private static final char MINUS = '-'; + private static final char SEPARATOR_LOWER = 't'; + private static final char SEPARATOR_SPACE = ' '; + private static final char FRACTION_SEPARATOR_1 = '.'; + private static final char FRACTION_SEPARATOR_2 = ','; + private static final char ZULU_UPPER = 'Z'; + private static final char ZULU_LOWER = 'z'; + + private ZoneId zone; + + public RFC3339CompatibleDateTimeFormatter(String pattern) { + super(pattern); + } + + public RFC3339CompatibleDateTimeFormatter(java.time.format.DateTimeFormatter formatter) { + super(formatter); + } + + public RFC3339CompatibleDateTimeFormatter(java.time.format.DateTimeFormatter formatter, ZoneId zone) { + super(formatter); + this.zone = zone; + } + + @Override + public OpenSearchDateTimeFormatter withZone(ZoneId zoneId) { + return new RFC3339CompatibleDateTimeFormatter(getFormatter().withZone(zoneId), zoneId); + } + + @Override + public OpenSearchDateTimeFormatter withLocale(Locale locale) { + return new RFC3339CompatibleDateTimeFormatter(getFormatter().withLocale(locale)); + } + + @Override + public Object parseObject(String text, ParsePosition pos) { + try { + return parse(text); + } catch (DateTimeException e) { + return null; + } + } + + @Override + public TemporalAccessor parse(final String dateTime) { + OffsetDateTime parsedDatetime = parse(dateTime, new ParsePosition(0)); + return zone == null ? 
parsedDatetime : parsedDatetime.atZoneSameInstant(zone); + } + + public OffsetDateTime parse(String date, ParsePosition pos) { + if (date == null) { + throw new IllegalArgumentException("date cannot be null"); + } + + final int len = date.length() - pos.getIndex(); + if (len <= 0) { + throw new DateTimeParseException("out of bound parse position", date, pos.getIndex()); + } + final char[] chars = date.substring(pos.getIndex()).toCharArray(); + + // Date portion + + // YEAR + final int years = getYear(chars, pos); + if (4 == len) { + return OffsetDateTime.of(years, 1, 1, 0, 0, 0, 0, ZoneOffset.UTC); + } + + // MONTH + consumeChar(chars, pos, DATE_SEPARATOR); + final int months = getMonth(chars, pos); + if (7 == len) { + return OffsetDateTime.of(years, months, 1, 0, 0, 0, 0, ZoneOffset.UTC); + } + + // DAY + consumeChar(chars, pos, DATE_SEPARATOR); + final int days = getDay(chars, pos); + if (10 == len) { + return OffsetDateTime.of(years, months, days, 0, 0, 0, 0, ZoneOffset.UTC); + } + + // HOURS + consumeChar(chars, pos, SEPARATOR_UPPER, SEPARATOR_LOWER, SEPARATOR_SPACE); + final int hours = getHour(chars, pos); + + // MINUTES + consumeChar(chars, pos, TIME_SEPARATOR); + final int minutes = getMinute(chars, pos); + if (16 == len) { + throw new DateTimeParseException("No timezone offset information", new String(chars), pos.getIndex()); + } + + // SECONDS or TIMEZONE + return handleTime(chars, pos, years, months, days, hours, minutes); + } + + private static boolean isDigit(char c) { + return (c >= '0' && c <= '9'); + } + + private static int digit(char c) { + return c - '0'; + } + + private static int readInt(final char[] strNum, ParsePosition pos, int n) { + int start = pos.getIndex(), end = start + n; + if (end > strNum.length) { + pos.setErrorIndex(end); + throw new DateTimeParseException("Unexpected end of expression at position " + strNum.length, new String(strNum), end); + } + + int result = 0; + for (int i = start; i < end; i++) { + final char c = strNum[i]; + if (isDigit(c) == false) { + pos.setErrorIndex(i); + throw new DateTimeParseException("Character " + c + " is not a digit", new String(strNum), i); + } + int digit = digit(c); + result = result * 10 + digit; + } + pos.setIndex(end); + return result; + } + + private static int readIntUnchecked(final char[] strNum, ParsePosition pos, int n) { + int start = pos.getIndex(), end = start + n; + int result = 0; + for (int i = start; i < end; i++) { + final char c = strNum[i]; + int digit = digit(c); + result = result * 10 + digit; + } + pos.setIndex(end); + return result; + } + + private static int getHour(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getMinute(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getDay(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static boolean isValidOffset(char[] chars, int offset) { + return offset < chars.length; + } + + private static void consumeChar(char[] chars, ParsePosition pos, char expected) { + int offset = pos.getIndex(); + if (isValidOffset(chars, offset) == false) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + + if (chars[offset] != expected) { + throw new DateTimeParseException("Expected character " + expected + " at position " + offset, new String(chars), offset); + } + pos.setIndex(offset + 1); + } + + private static void consumeNextChar(char[] chars, ParsePosition pos) { + int offset = 
pos.getIndex(); + if (isValidOffset(chars, offset) == false) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + pos.setIndex(offset + 1); + } + + private static boolean checkPositionContains(char[] chars, ParsePosition pos, char... expected) { + int offset = pos.getIndex(); + if (offset >= chars.length) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + + boolean found = false; + for (char e : expected) { + if (chars[offset] == e) { + found = true; + break; + } + } + return found; + } + + private static void consumeChar(char[] chars, ParsePosition pos, char... expected) { + int offset = pos.getIndex(); + if (offset >= chars.length) { + throw new DateTimeParseException("Unexpected end of input", new String(chars), offset); + } + + boolean found = false; + for (char e : expected) { + if (chars[offset] == e) { + found = true; + pos.setIndex(offset + 1); + break; + } + } + if (!found) { + throw new DateTimeParseException( + "Expected character " + Arrays.toString(expected) + " at position " + offset, + new String(chars), + offset + ); + } + } + + private static void assertNoMoreChars(char[] chars, ParsePosition pos) { + if (chars.length > pos.getIndex()) { + throw new DateTimeParseException("Trailing junk data after position " + pos.getIndex(), new String(chars), pos.getIndex()); + } + } + + private static ZoneOffset parseTimezone(char[] chars, ParsePosition pos) { + int offset = pos.getIndex(); + final int left = chars.length - offset; + if (checkPositionContains(chars, pos, ZULU_LOWER, ZULU_UPPER)) { + consumeNextChar(chars, pos); + assertNoMoreChars(chars, pos); + return ZoneOffset.UTC; + } + + if (left != 6) { + throw new DateTimeParseException("Invalid timezone offset", new String(chars, offset, left), offset); + } + + final char sign = chars[offset]; + consumeNextChar(chars, pos); + int hours = getHour(chars, pos); + consumeChar(chars, pos, TIME_SEPARATOR); + int minutes = getMinute(chars, pos); + if (sign == MINUS) { + if (hours == 0 && minutes == 0) { + throw new DateTimeParseException("Unknown 'Local Offset Convention' date-time not allowed", new String(chars), offset); + } + hours = -hours; + minutes = -minutes; + } else if (sign != PLUS) { + throw new DateTimeParseException("Invalid character starting at position " + offset, new String(chars), offset); + } + + return ZoneOffset.ofHoursMinutes(hours, minutes); + } + + private static OffsetDateTime handleTime(char[] chars, ParsePosition pos, int year, int month, int day, int hour, int minute) { + switch (chars[pos.getIndex()]) { + case TIME_SEPARATOR: + consumeChar(chars, pos, TIME_SEPARATOR); + return handleSeconds(year, month, day, hour, minute, chars, pos); + + case PLUS: + case MINUS: + case ZULU_UPPER: + case ZULU_LOWER: + final ZoneOffset zoneOffset = parseTimezone(chars, pos); + return OffsetDateTime.of(year, month, day, hour, minute, 0, 0, zoneOffset); + } + throw new DateTimeParseException("Unexpected character " + chars[pos.getIndex()] + " at position " + pos.getIndex(), new String(chars), pos.getIndex()); + } + + private static int getMonth(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getYear(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 4); + } + + private static int getSeconds(final char[] chars, ParsePosition pos) { + return readInt(chars, pos, 2); + } + + private static int getFractions(final char[] chars, final ParsePosition pos, final int len) { + final int fractions; + 
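// e.g. for "2018-05-15T17:14:56.5Z" the fraction digits are "5": len == 1 and readIntUnchecked returns 5, + // which the switch below scales to 5 * 100_000_000 = 500_000_000 nanoseconds. + 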
fractions = readIntUnchecked(chars, pos, len); + switch (len) { + case 0: + throw new DateTimeParseException("Must have at least 1 fraction digit", new String(chars), pos.getIndex()); + case 1: + return fractions * 100_000_000; + case 2: + return fractions * 10_000_000; + case 3: + return fractions * 1_000_000; + case 4: + return fractions * 100_000; + case 5: + return fractions * 10_000; + case 6: + return fractions * 1_000; + case 7: + return fractions * 100; + case 8: + return fractions * 10; + default: + return fractions; + } + } + + public static int indexOfNonDigit(final char[] text, int offset) { + for (int i = offset; i < text.length; i++) { + if (isDigit(text[i]) == false) { + return i; + } + } + return -1; + } + + public static void consumeDigits(final char[] text, ParsePosition pos) { + final int idx = indexOfNonDigit(text, pos.getIndex()); + if (idx == -1) { + pos.setErrorIndex(text.length); + } else { + pos.setIndex(idx); + } + } + + private static OffsetDateTime handleSeconds(int year, int month, int day, int hour, int minute, char[] chars, ParsePosition pos) { + // From here the specification is more lenient + final int seconds = getSeconds(chars, pos); + int currPos = pos.getIndex(); + final int remaining = chars.length - currPos; + if (remaining == 0) { + // No offset + throw new DateTimeParseException("No timezone offset information", new String(chars), pos.getIndex()); + } + + ZoneOffset offset = null; + int fractions = 0; + if (remaining == 1 && checkPositionContains(chars, pos, ZULU_LOWER, ZULU_UPPER)) { + consumeNextChar(chars, pos); + // Do nothing, we are done + offset = ZoneOffset.UTC; + assertNoMoreChars(chars, pos); + } else if (remaining >= 1 && checkPositionContains(chars, pos, FRACTION_SEPARATOR_1, FRACTION_SEPARATOR_2)) { + // We have fractional seconds + consumeNextChar(chars, pos); + ParsePosition initPosition = new ParsePosition(pos.getIndex()); + consumeDigits(chars, pos); + if (pos.getErrorIndex() == -1) { + // We have an end of fractions + final int len = pos.getIndex() - initPosition.getIndex(); + fractions = getFractions(chars, initPosition, len); + offset = parseTimezone(chars, pos); + } else { + throw new DateTimeParseException("No timezone offset information", new String(chars), pos.getIndex()); + } + } else if (remaining >= 1 && checkPositionContains(chars, pos, PLUS, MINUS)) { + // No fractional seconds + offset = parseTimezone(chars, pos); + } else { + throw new DateTimeParseException("Unexpected character at position " + (pos.getIndex()), new String(chars), pos.getIndex()); + } + + return OffsetDateTime.of(year, month, day, hour, minute, seconds, fractions, offset); + } +} diff --git a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java index e8ddfde11f4cc..4fd8986d0b428 100644 --- a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java @@ -779,7 +779,9 @@ public void testSamePrinterOutput() { DateTime jodaDate = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC); for (FormatNames format : FormatNames.values()) { - if (format == FormatNames.ISO8601 || format == FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS) { + if (format == FormatNames.ISO8601 + || format == FormatNames.STRICT_DATE_OPTIONAL_TIME_NANOS + || format == FormatNames.RFC3339_LENIENT) { // Nanos aren't supported by joda continue; } diff --git 
a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java index 681daf1755890..85c9919275c3a 100644 --- a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java @@ -249,10 +249,19 @@ public void testEpochSecondParserWithFraction() { } public void testEpochMilliParsersWithDifferentFormatters() { - DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time||epoch_millis"); - TemporalAccessor accessor = formatter.parse("123"); - assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(123L)); - assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis")); + { + DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time||epoch_millis"); + TemporalAccessor accessor = formatter.parse("123"); + assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(123L)); + assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis")); + } + + { + DateFormatter formatter = DateFormatter.forPattern("rfc3339_lenient||epoch_millis"); + TemporalAccessor accessor = formatter.parse("123"); + assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(123L)); + assertThat(formatter.pattern(), is("rfc3339_lenient||epoch_millis")); + } } public void testParsersWithMultipleInternalFormats() throws Exception { @@ -317,6 +326,11 @@ public void testEqualsAndHashcode() { assertThat(epochMillisFormatter.hashCode(), is(DateFormatters.forPattern("epoch_millis").hashCode())); assertThat(epochMillisFormatter, sameInstance(DateFormatters.forPattern("epoch_millis"))); assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis"))); + + DateFormatter rfc3339Formatter = DateFormatters.forPattern("rfc3339_lenient"); + assertThat(rfc3339Formatter.hashCode(), is(DateFormatters.forPattern("rfc3339_lenient").hashCode())); + assertThat(rfc3339Formatter, sameInstance(DateFormatters.forPattern("rfc3339_lenient"))); + assertThat(rfc3339Formatter, equalTo(DateFormatters.forPattern("rfc3339_lenient"))); } public void testSupportBackwardsJava8Format() { @@ -461,6 +475,132 @@ public void testIso8601Parsing() { formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+01:00")); } + public void testRFC3339Parsing() { + DateFormatter formatter = DateFormatters.forPattern("rfc3339_lenient"); + + // a timezone offset is not allowed with a date-only value + formatter.format(formatter.parse("2018")); + formatter.format(formatter.parse("2018-05")); + formatter.format(formatter.parse("2018-05-15")); + + formatter.format(formatter.parse("2018-05-15T17:14Z")); + formatter.format(formatter.parse("2018-05-15T17:14z")); + formatter.format(formatter.parse("2018-05-15T17:14+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14-01:00")); + + formatter.format(formatter.parse("2018-05-15T17:14:56Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56z")); + formatter.format(formatter.parse("2018-05-15T17:14:56+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56-01:00")); + + // milliseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123-01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123Z")); + 
formatter.format(formatter.parse("2018-05-15T17:14:56,123z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123+01:00")); + + // microseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123456Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456+01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456-01:00")); + + // nanoseconds can be separated using comma or decimal point + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56.123456789-01:00")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789Z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789z")); + formatter.format(formatter.parse("2018-05-15T17:14:56,123456789+01:00")); + + // 1994-11-05T08:15:30-05:00 corresponds to November 5, 1994, 8:15:30 am, US Eastern Standard Time. + // 1994-11-05T13:15:30Z corresponds to the same instant. + final Instant instantA = DateFormatters.from(formatter.parse("1994-11-05T08:15:30-05:00")).toInstant(); + final Instant instantB = DateFormatters.from(formatter.parse("1994-11-05T13:15:30Z")).toInstant(); + assertThat(instantA, is(instantB)); + + // Invalid dates should throw an exception + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("abc")); + assertThat(e.getMessage(), is("failed to parse date field [abc] with format [rfc3339_lenient]")); + // Invalid offset ("-00:00" is the RFC 3339 unknown local offset convention, which is rejected) + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56-00:00")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56-00:00] with format [rfc3339_lenient]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.+00:00")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.+00:00] with format [rfc3339_lenient]")); + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56_00:00")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56_00:00] with format [rfc3339_lenient]")); + // No offset + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56] with format [rfc3339_lenient]")); + // No end of fraction + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.123")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.123] with format [rfc3339_lenient]")); + // Invalid fraction + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.abcZ")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.abcZ] with format [rfc3339_lenient]")); + // Invalid date + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("201805-15T17:14:56.123456+0000")); + assertThat(e.getMessage(), is("failed to parse date field 
[201805-15T17:14:56.123456+0000] with format [rfc3339_lenient]")); + // More than 9 digits of nanosecond resolution + e = expectThrows(IllegalArgumentException.class, () -> formatter.parse("2018-05-15T17:14:56.1234567891Z")); + assertThat(e.getMessage(), is("failed to parse date field [2018-05-15T17:14:56.1234567891Z] with format [rfc3339_lenient]")); + } + + public void testRFC3339ParserWithDifferentFormatters() { + { + DateFormatter formatter = DateFormatter.forPattern("strict_date_optional_time||rfc3339_lenient"); + TemporalAccessor accessor = formatter.parse("2018-05-15T17:14:56+0100"); + assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(1526400896000L)); + assertThat(formatter.pattern(), is("strict_date_optional_time||rfc3339_lenient")); + } + + { + DateFormatter formatter = DateFormatter.forPattern("rfc3339_lenient||strict_date_optional_time"); + TemporalAccessor accessor = formatter.parse("2018-05-15T17:14:56.123+0100"); + assertThat(DateFormatters.from(accessor).toInstant().toEpochMilli(), is(1526400896123L)); + assertThat(formatter.pattern(), is("rfc3339_lenient||strict_date_optional_time")); + } + + { + DateFormatter formatter = DateFormatter.forPattern("rfc3339_lenient||strict_date_optional_time"); + TemporalAccessor accessor = formatter.parse("2018-05-15T17:14:56.123456789+0100"); + assertThat(DateFormatters.from(accessor).toInstant().getNano(), is(123456789)); + assertThat(formatter.pattern(), is("rfc3339_lenient||strict_date_optional_time")); + } + } + + public void testRFC3339ParserAgainstDifferentFormatters() { + DateFormatter rfc3339Formatter = DateFormatter.forPattern("rfc3339_lenient"); + { + DateFormatter isoFormatter = DateFormatter.forPattern("strict_date_optional_time"); + + assertDateTimeEquals("2018-05-15T17:14Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14-01:00", rfc3339Formatter, isoFormatter); + + assertDateTimeEquals("2018-05-15T17:14:56Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56-01:00", rfc3339Formatter, isoFormatter); + + assertDateTimeEquals("2018-05-15T17:14:56.123Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123-01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123-01:00", rfc3339Formatter, isoFormatter); + + assertDateTimeEquals("2018-05-15T17:14:56.123456Z", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123456789+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56.123456789-01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123456789+01:00", rfc3339Formatter, isoFormatter); + assertDateTimeEquals("2018-05-15T17:14:56,123456789-01:00", rfc3339Formatter, isoFormatter); + } + } + public void testRoundupFormatterWithEpochDates() { assertRoundupFormatter("epoch_millis", "1234567890", 1234567890L); // also check nanos of the epoch_millis formatter if it is rounded up to the nano second @@ -683,4 +823,10 @@ public void testCamelCaseDeprecation() { } } } + + void assertDateTimeEquals(String toTest, DateFormatter candidateParser, DateFormatter baselineParser) { + Instant 
gotInstant = DateFormatters.from(candidateParser.parse(toTest)).toInstant(); + Instant expectedInstant = DateFormatters.from(baselineParser.parse(toTest)).toInstant(); + assertThat(gotInstant, is(expectedInstant)); + } } From b4306b624e5546dd102a4ed45470564b31d5f3a0 Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Wed, 24 Jan 2024 18:16:45 -0800 Subject: [PATCH 125/178] Add calling class name to mock log appender (#12013) Issue #10799 is caused by a stopped or unstarted appender still being present in the static logger state. I have not been able to find the improper usage of the log appender that would cause this error. To help debug, this commit adds the calling class name to the mock log appender. Any future occurrence should result in an error message containing the name of the class that incorrectly configured the logger. I've also added an override to the `stop()` method to force callers to use the close() method, which properly cleans up the logger state, rather than stopping the appender directly. Signed-off-by: Andrew Ross <andrross@amazon.com> --- .../org/opensearch/test/MockLogAppender.java | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java b/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java index 328aaf8a65b1f..59eda7a665d4c 100644 --- a/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java +++ b/test/framework/src/main/java/org/opensearch/test/MockLogAppender.java @@ -35,6 +35,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Property; import org.apache.logging.log4j.core.filter.RegexFilter; import org.opensearch.common.logging.Loggers; import org.opensearch.common.regex.Regex; @@ -68,11 +69,19 @@ public class MockLogAppender extends AbstractAppender implements AutoCloseable { * write to a closed MockLogAppender instance. */ public static MockLogAppender createForLoggers(Logger... loggers) throws IllegalAccessException { - return createForLoggers(".*(\n.*)*", loggers); + final String callingClass = Thread.currentThread().getStackTrace()[2].getClassName(); + return createForLoggersInternal(callingClass, ".*(\n.*)*", loggers); } public static MockLogAppender createForLoggers(String filter, Logger... loggers) throws IllegalAccessException { + final String callingClass = Thread.currentThread().getStackTrace()[2].getClassName(); + return createForLoggersInternal(callingClass, filter, loggers); + } + + private static MockLogAppender createForLoggersInternal(String callingClass, String filter, Logger... loggers) + throws IllegalAccessException { + final MockLogAppender appender = new MockLogAppender( + callingClass + "-mock-log-appender", RegexFilter.createFilter(filter, new String[0], false, null, null), Collections.unmodifiableList(Arrays.asList(loggers)) ); @@ -83,8 +92,8 @@ public static MockLogAppender createForLoggers(String filter, Logger... loggers) return appender; } - private MockLogAppender(RegexFilter filter, List<Logger> loggers) { - super("mock", filter, null); + private MockLogAppender(String name, RegexFilter filter, List<Logger> loggers) { + super(name, filter, null, true, Property.EMPTY_ARRAY); /* * We use a copy-on-write array list since log messages could be appended while we are setting up expectations. 
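For example, a test thread may be registering a new expectation at the same moment that another thread logs through this appender. 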
When that occurs, * we would run into a concurrent modification exception from the iteration over the expectations in #append, concurrent with a @@ -116,7 +125,14 @@ public void close() { for (Logger logger : loggers) { Loggers.removeAppender(logger, this); } - this.stop(); + super.stop(); + } + + @Override + public void stop() { + // MockLogAppender should be used with try-with-resources to ensure + // proper clean up ordering and should never be stopped directly. + throw new UnsupportedOperationException("Use close() to ensure proper clean up ordering"); } public interface LoggingExpectation { From e017a9c48815aa3f3f2ec199a3a9f5793471b0ae Mon Sep 17 00:00:00 2001 From: Bhumika Saini <sabhumik@amazon.com> Date: Thu, 25 Jan 2024 18:13:19 +0530 Subject: [PATCH 126/178] Fix limit check for listing S3 objects (#12018) * Fix limit check for listing S3 objects Signed-off-by: Bhumika Saini <sabhumik@amazon.com> * Apply spotless fix Signed-off-by: Bhumika Saini <sabhumik@amazon.com> --------- Signed-off-by: Bhumika Saini <sabhumik@amazon.com> --- .../org/opensearch/repositories/s3/S3BlobContainer.java | 2 +- .../repositories/s3/S3BlobStoreContainerTests.java | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java index 3a55fcb0bdbcd..25f361b40636e 100644 --- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3BlobContainer.java @@ -514,7 +514,7 @@ private static List<ListObjectsV2Response> executeListing( for (ListObjectsV2Response listObjectsV2Response : listObjectsIterable) { results.add(listObjectsV2Response); totalObjects += listObjectsV2Response.contents().size(); - if (limit != -1 && totalObjects > limit) { + if (limit != -1 && totalObjects >= limit) { break; } } diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java index 58ad290a31e85..2b45e9cfe2d4b 100644 --- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3BlobStoreContainerTests.java @@ -916,6 +916,15 @@ public void testListBlobsByPrefixInLexicographicOrderWithLimitLessThanPageSize() testListBlobsByPrefixInLexicographicOrder(2, 1, BlobContainer.BlobNameSortOrder.LEXICOGRAPHIC); } + /** + * Test the boundary value at page size to ensure + * unnecessary calls are not made to S3 by fetching the next page. + * @throws IOException + */ + public void testListBlobsByPrefixInLexicographicOrderWithLimitEqualToPageSize() throws IOException { + testListBlobsByPrefixInLexicographicOrder(5, 1, BlobContainer.BlobNameSortOrder.LEXICOGRAPHIC); + } + public void testListBlobsByPrefixInLexicographicOrderWithLimitGreaterThanPageSize() throws IOException { testListBlobsByPrefixInLexicographicOrder(8, 2, BlobContainer.BlobNameSortOrder.LEXICOGRAPHIC); } From c1bd7526394f0f1fd5f2b02b2a0f1e35dc337068 Mon Sep 17 00:00:00 2001 From: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> Date: Thu, 25 Jan 2024 05:03:40 -0800 Subject: [PATCH 127/178] [Searchable Snapshot] Add Relevant Error handling for Index Creation with remote_snapshot. 
(#11867) * Block index creation when setting property index.store.type to remote_snapshot Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Adding null check. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * fix failing test Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * remove unnecessary test. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Address comments on PR. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Resolve conflicts on PR. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> * Add changelog entry. Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> --------- Signed-off-by: Rishikesh1159 <rishireddy1159@gmail.com> Signed-off-by: Rishikesh Pasham <62345295+Rishikesh1159@users.noreply.github.com> --- CHANGELOG.md | 1 + .../metadata/MetadataCreateIndexService.java | 12 +++++++ .../MetadataCreateIndexServiceTests.java | 32 +++++++++++++++++++ 3 files changed, 45 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index aa7675edb8cc4..1f847cf3a70ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -242,6 +242,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix tracing context propagation for local transport instrumentation ([#11490](https://github.com/opensearch-project/OpenSearch/pull/11490)) - Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815)) - Fix memory leak issue in ReorganizingLongHash ([#11953](https://github.com/opensearch-project/OpenSearch/issues/11953)) +- Prevent setting remote_snapshot store type on index creation ([#11867](https://github.com/opensearch-project/OpenSearch/pull/11867)) ### Security diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index 3384393d8feaf..4dde5d0ea013f 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -39,6 +39,7 @@ import org.opensearch.OpenSearchException; import org.opensearch.ResourceAlreadyExistsException; import org.opensearch.Version; +import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.opensearch.action.admin.indices.shrink.ResizeType; @@ -135,6 +136,7 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_TRANSLOG_STORE_REPOSITORY; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; import static org.opensearch.cluster.metadata.Metadata.DEFAULT_REPLICA_COUNT_SETTING; +import static org.opensearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.isRemoteStoreAttributePresent; @@ -816,6 +818,16 @@ static Settings aggregateIndexSettings( final Settings.Builder requestSettings = Settings.builder().put(request.settings()); final Settings.Builder indexSettingsBuilder = Settings.builder(); + + // Store type of `remote_snapshot` is intended to be system-managed for searchable snapshot indexes, so a special case is needed here + // to prevent a user from specifying this value when creating an index + String 
storeTypeSetting = request.settings().get(INDEX_STORE_TYPE_SETTING.getKey()); + if (storeTypeSetting != null && storeTypeSetting.equals(RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT.toString())) { + throw new IllegalArgumentException( + "cannot create index with index setting \"index.store.type\" set to \"remote_snapshot\". Store type can be set to \"remote_snapshot\" only when restoring a remote snapshot by using \"storage_type\": \"remote_snapshot\"" + ); + } + if (sourceMetadata == null) { final Settings.Builder additionalIndexSettings = Settings.builder(); final Settings templateAndRequestSettings = Settings.builder().put(combinedTemplateSettings).put(request.settings()).build(); diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index cea151748bfb6..6d1f359d210ac 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -35,6 +35,7 @@ import org.opensearch.ExceptionsHelper; import org.opensearch.ResourceAlreadyExistsException; import org.opensearch.Version; +import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest; import org.opensearch.action.admin.indices.shrink.ResizeType; @@ -133,6 +134,7 @@ import static org.opensearch.cluster.metadata.MetadataCreateIndexService.getIndexNumberOfRoutingShards; import static org.opensearch.cluster.metadata.MetadataCreateIndexService.parseV1Mappings; import static org.opensearch.cluster.metadata.MetadataCreateIndexService.resolveAndValidateAliases; +import static org.opensearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.opensearch.index.IndexSettings.INDEX_REFRESH_INTERVAL_SETTING; import static org.opensearch.index.IndexSettings.INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; @@ -146,6 +148,7 @@ import static org.opensearch.node.Node.NODE_ATTRIBUTES; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; @@ -1936,6 +1939,35 @@ public void testRequestDurabilityWhenRestrictSettingTrue() { assertEquals(Translog.Durability.REQUEST, INDEX_TRANSLOG_DURABILITY_SETTING.get(indexSettings)); } + public void testIndexCreationWithIndexStoreTypeRemoteSnapshotThrowsException() { + // This checks that aggregateIndexSettings throws an exception when the index setting + // index.store.type is set to remote_snapshot + request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); + final Settings.Builder requestSettings = Settings.builder(); + requestSettings.put(INDEX_STORE_TYPE_SETTING.getKey(), RestoreSnapshotRequest.StorageType.REMOTE_SNAPSHOT); + request.settings(requestSettings.build()); + final IllegalArgumentException error = expectThrows( + IllegalArgumentException.class, + () -> aggregateIndexSettings( 
ClusterState.EMPTY_STATE, + request, + Settings.EMPTY, + null, + Settings.EMPTY, + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + randomShardLimitService(), + Collections.emptySet(), + clusterSettings + ) + ); + assertThat( + error.getMessage(), + containsString( + "cannot create index with index setting \"index.store.type\" set to \"remote_snapshot\". Store type can be set to \"remote_snapshot\" only when restoring a remote snapshot by using \"storage_type\": \"remote_snapshot\"" + ) + ); + } + private IndexTemplateMetadata addMatchingTemplate(Consumer<IndexTemplateMetadata.Builder> configurator) { IndexTemplateMetadata.Builder builder = templateMetadataBuilder("template1", "te*"); configurator.accept(builder); From 9f649e03a68b069089600ced6aa566a1de95d79b Mon Sep 17 00:00:00 2001 From: panguixin <panguixin@bytedance.com> Date: Fri, 26 Jan 2024 06:50:37 +0800 Subject: [PATCH 128/178] [BUG] Fix remote shards balancer when filtering throttled nodes (#11724) * fix remote shards balancer Signed-off-by: panguixin <panguixin@bytedance.com> * add change log Signed-off-by: panguixin <panguixin@bytedance.com> --------- Signed-off-by: panguixin <panguixin@bytedance.com> Signed-off-by: Andrew Ross <andrross@amazon.com> Co-authored-by: Andrew Ross <andrross@amazon.com> --- CHANGELOG.md | 1 + .../allocator/RemoteShardsBalancer.java | 22 ++++++++++++++----- .../RemoteShardsBalancerBaseTestCase.java | 5 +++++ 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1f847cf3a70ef..aa31819ffae97 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -243,6 +243,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix parsing of single line comments in `lang-painless` ([#11815](https://github.com/opensearch-project/OpenSearch/issues/11815)) - Fix memory leak issue in ReorganizingLongHash ([#11953](https://github.com/opensearch-project/OpenSearch/issues/11953)) - Prevent setting remote_snapshot store type on index creation ([#11867](https://github.com/opensearch-project/OpenSearch/pull/11867)) +- [BUG] Fix remote shards balancer when filtering throttled nodes ([#11724](https://github.com/opensearch-project/OpenSearch/pull/11724)) ### Security diff --git a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java index e2f24e5f503df..a05938c176678 100644 --- a/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java +++ b/server/src/main/java/org/opensearch/cluster/routing/allocation/allocator/RemoteShardsBalancer.java @@ -43,6 +43,8 @@ public final class RemoteShardsBalancer extends ShardsBalancer { private final Logger logger; private final RoutingAllocation allocation; private final RoutingNodes routingNodes; + // indicates if there are any nodes being throttled for allocating any unassigned shards + private boolean anyNodesThrottled = false; public RemoteShardsBalancer(Logger logger, RoutingAllocation allocation) { this.logger = logger; @@ -358,12 +360,16 @@ private void allocateUnassignedReplicas(Queue<RoutingNode> nodeQueue, Map<String } private void ignoreRemainingShards(Map<String, UnassignedIndexShards> unassignedShardMap) { + // If any nodes are throttled during allocation, mark all remaining unassigned shards as THROTTLED + final UnassignedInfo.AllocationStatus status = anyNodesThrottled + ? 
UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED + : UnassignedInfo.AllocationStatus.DECIDERS_NO; for (UnassignedIndexShards indexShards : unassignedShardMap.values()) { for (ShardRouting shard : indexShards.getPrimaries()) { - routingNodes.unassigned().ignoreShard(shard, UnassignedInfo.AllocationStatus.DECIDERS_NO, allocation.changes()); + routingNodes.unassigned().ignoreShard(shard, status, allocation.changes()); } for (ShardRouting shard : indexShards.getReplicas()) { - routingNodes.unassigned().ignoreShard(shard, UnassignedInfo.AllocationStatus.DECIDERS_NO, allocation.changes()); + routingNodes.unassigned().ignoreShard(shard, status, allocation.changes()); } } } @@ -424,11 +430,11 @@ private void allocateUnassignedShards( private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouting shard) { boolean allocated = false; boolean throttled = false; - Set<String> nodesCheckedForShard = new HashSet<>(); + int numNodesToCheck = nodeQueue.size(); while (nodeQueue.isEmpty() == false) { RoutingNode node = nodeQueue.poll(); + --numNodesToCheck; Decision allocateDecision = allocation.deciders().canAllocate(shard, node, allocation); - nodesCheckedForShard.add(node.nodeId()); if (allocateDecision.type() == Decision.Type.YES) { if (logger.isTraceEnabled()) { logger.trace("Assigned shard [{}] to [{}]", shardShortSummary(shard), node.nodeId()); @@ -467,6 +473,10 @@ private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouti } nodeQueue.offer(node); } else { + if (nodeLevelDecision.type() == Decision.Type.THROTTLE) { + anyNodesThrottled = true; + } + if (logger.isTraceEnabled()) { logger.trace( "Cannot allocate any shard to node: [{}]. Removing from queue. Node level decisions: [{}],[{}]", @@ -478,14 +488,14 @@ private void tryAllocateUnassignedShard(Queue<RoutingNode> nodeQueue, ShardRouti } // Break out if all nodes in the queue have been checked for this shard - if (nodeQueue.stream().allMatch(rn -> nodesCheckedForShard.contains(rn.nodeId()))) { + if (numNodesToCheck == 0) { break; } } } if (allocated == false) { - UnassignedInfo.AllocationStatus status = throttled + UnassignedInfo.AllocationStatus status = (throttled || anyNodesThrottled) ? UnassignedInfo.AllocationStatus.DECIDERS_THROTTLED : UnassignedInfo.AllocationStatus.DECIDERS_NO; routingNodes.unassigned().ignoreShard(shard, status, allocation.changes()); diff --git a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java index a1db6cd83ab6c..6a03a1f79bcde 100644 --- a/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java +++ b/server/src/test/java/org/opensearch/cluster/routing/allocation/RemoteShardsBalancerBaseTestCase.java @@ -229,6 +229,11 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing return Decision.ALWAYS; } } + + @Override + public Decision canAllocateAnyShardToNode(RoutingNode node, RoutingAllocation allocation) { + return throttle ? 
Decision.THROTTLE : Decision.YES; + } }); Collections.shuffle(deciders, random()); return new AllocationDeciders(deciders); From 6e744f591a972e1217096802cc2e6f43eb701c2d Mon Sep 17 00:00:00 2001 From: Sachin Kale <sachinpkale@gmail.com> Date: Sat, 27 Jan 2024 01:09:25 +0530 Subject: [PATCH 129/178] Fetch all the locks for a shard to avoid multiple remote store calls (#11987) * Fetch all the locks for a shard to avoid multiple calls Signed-off-by: Sachin Kale <kalsac@amazon.com> * Fix lock file comparison issue Signed-off-by: Sachin Kale <kalsac@amazon.com> * Add unit tests Signed-off-by: Sachin Kale <kalsac@amazon.com> * Add more unit tests Signed-off-by: Sachin Kale <kalsac@amazon.com> * Address PR comments Signed-off-by: Sachin Kale <kalsac@amazon.com> --------- Signed-off-by: Sachin Kale <kalsac@amazon.com> Co-authored-by: Sachin Kale <kalsac@amazon.com> --- .../store/RemoteSegmentStoreDirectory.java | 26 +++++----- .../RemoteStoreMetadataLockManager.java | 8 +++- .../RemoteSegmentStoreDirectoryTests.java | 47 +++++++++++++++++++ .../RemoteStoreMetadataLockManagerTests.java | 23 +++++++++ 4 files changed, 88 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java index 9c1e902606cab..dab99fd25b192 100644 --- a/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java +++ b/server/src/main/java/org/opensearch/index/store/RemoteSegmentStoreDirectory.java @@ -164,7 +164,7 @@ public RemoteSegmentMetadata init() throws IOException { */ public RemoteSegmentMetadata initializeToSpecificCommit(long primaryTerm, long commitGeneration, String acquirerId) throws IOException { String metadataFilePrefix = MetadataFilenameUtils.getMetadataFilePrefixForCommit(primaryTerm, commitGeneration); - String metadataFile = ((RemoteStoreMetadataLockManager) mdLockManager).fetchLock(metadataFilePrefix, acquirerId); + String metadataFile = ((RemoteStoreMetadataLockManager) mdLockManager).fetchLockedMetadataFile(metadataFilePrefix, acquirerId); RemoteSegmentMetadata remoteSegmentMetadata = readMetadataFile(metadataFile); if (remoteSegmentMetadata != null) { this.segmentsUploadedToRemoteStore = new ConcurrentHashMap<>(remoteSegmentMetadata.getMetadata()); @@ -749,20 +749,16 @@ public void deleteStaleSegments(int lastNMetadataFilesToKeep) throws IOException lastNMetadataFilesToKeep, sortedMetadataFileList.size() ); - List<String> metadataFilesToBeDeleted = metadataFilesEligibleToDelete.stream().filter(metadataFile -> { - try { - return !isLockAcquired(metadataFile); - } catch (IOException e) { - logger.error( - "skipping metadata file (" - + metadataFile - + ") deletion for this run," - + " as checking lock for metadata is failing with error: " - + e - ); - return false; - } - }).collect(Collectors.toList()); + Set<String> allLockFiles; + try { + allLockFiles = ((RemoteStoreMetadataLockManager) mdLockManager).fetchLockedMetadataFiles(MetadataFilenameUtils.METADATA_PREFIX); + } catch (Exception e) { + logger.error("Exception while fetching segment metadata lock files, skipping deleteStaleSegments", e); + return; + } + List<String> metadataFilesToBeDeleted = metadataFilesEligibleToDelete.stream() + .filter(metadataFile -> allLockFiles.contains(metadataFile) == false) + .collect(Collectors.toList()); sortedMetadataFileList.removeAll(metadataFilesToBeDeleted); logger.debug( diff --git 
a/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java b/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java index 5ebd00f59ef49..9c29e03c225e4 100644 --- a/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java +++ b/server/src/main/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManager.java @@ -21,6 +21,7 @@ import java.util.Collection; import java.util.List; import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; /** @@ -75,7 +76,7 @@ public void release(LockInfo lockInfo) throws IOException { } } - public String fetchLock(String filenamePrefix, String acquirerId) throws IOException { + public String fetchLockedMetadataFile(String filenamePrefix, String acquirerId) throws IOException { Collection<String> lockFiles = lockDirectory.listFilesByPrefix(filenamePrefix); List<String> lockFilesForAcquirer = lockFiles.stream() .filter(lockFile -> acquirerId.equals(FileLockInfo.LockFileUtils.getAcquirerIdFromLock(lockFile))) @@ -88,6 +89,11 @@ public String fetchLock(String filenamePrefix, String acquirerId) throws IOExcep return lockFilesForAcquirer.get(0); } + public Set<String> fetchLockedMetadataFiles(String filenamePrefix) throws IOException { + Collection<String> lockFiles = lockDirectory.listFilesByPrefix(filenamePrefix); + return lockFiles.stream().map(FileLockInfo.LockFileUtils::getFileToLockNameFromLock).collect(Collectors.toSet()); + } + /** * Checks whether a given file have any lock on it or not. * @param lockInfo File Lock Info instance for which we need to check if lock is acquired. diff --git a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java index 2c6c4afed69fd..7944ee681f5fc 100644 --- a/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java +++ b/server/src/test/java/org/opensearch/index/store/RemoteSegmentStoreDirectoryTests.java @@ -988,6 +988,53 @@ public void testDeleteStaleCommitsActualDelete() throws Exception { verify(remoteMetadataDirectory).deleteFile(metadataFilename3); } + public void testDeleteStaleCommitsActualDeleteWithLocks() throws Exception { + Map<String, Map<String, String>> metadataFilenameContentMapping = populateMetadata(); + remoteSegmentStoreDirectory.init(); + + // Locking one of the metadata files to ensure that it is not getting deleted. 
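+ // fetchLockedMetadataFiles returns the names of the metadata files that currently hold at least one lock, + // so stubbing it to return metadataFilename2 marks that file as locked. 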
+ when(mdLockManager.fetchLockedMetadataFiles(any())).thenReturn(Set.of(metadataFilename2)); + + // populateMetadata() adds a stub to return 3 metadata files + // We are passing lastNMetadataFilesToKeep=1 here so that the two oldest metadata files become eligible for deletion + remoteSegmentStoreDirectory.deleteStaleSegmentsAsync(1); + + for (String metadata : metadataFilenameContentMapping.get(metadataFilename3).values()) { + String uploadedFilename = metadata.split(RemoteSegmentStoreDirectory.UploadedSegmentMetadata.SEPARATOR)[1]; + verify(remoteDataDirectory).deleteFile(uploadedFilename); + } + assertBusy(() -> assertThat(remoteSegmentStoreDirectory.canDeleteStaleCommits.get(), is(true))); + verify(remoteMetadataDirectory).deleteFile(metadataFilename3); + verify(remoteMetadataDirectory, times(0)).deleteFile(metadataFilename2); + } + + public void testDeleteStaleCommitsNoDeletesDueToLocks() throws Exception { + remoteSegmentStoreDirectory.init(); + + // Locking all the old metadata files to ensure that none of the segment files are getting deleted. + when(mdLockManager.fetchLockedMetadataFiles(any())).thenReturn(Set.of(metadataFilename2, metadataFilename3)); + + // populateMetadata() adds a stub to return 3 metadata files + // We are passing lastNMetadataFilesToKeep=1 here so that the two oldest metadata files become eligible for deletion + remoteSegmentStoreDirectory.deleteStaleSegmentsAsync(1); + + assertBusy(() -> assertThat(remoteSegmentStoreDirectory.canDeleteStaleCommits.get(), is(true))); + verify(remoteMetadataDirectory, times(0)).deleteFile(any()); + } + + public void testDeleteStaleCommitsExceptionWhileFetchingLocks() throws Exception { + remoteSegmentStoreDirectory.init(); + + // Throw an exception while fetching the lock files to verify that stale segment deletion is skipped. + when(mdLockManager.fetchLockedMetadataFiles(any())).thenThrow(new RuntimeException("Rate limit exceeded")); + + // populateMetadata() adds a stub to return 3 metadata files + // We are passing lastNMetadataFilesToKeep=1 here, but the lock fetch failure must prevent any deletion + remoteSegmentStoreDirectory.deleteStaleSegmentsAsync(1); + + verify(remoteMetadataDirectory, times(0)).deleteFile(any()); + } + public void testDeleteStaleCommitsDeleteDedup() throws Exception { Map<String, Map<String, String>> metadataFilenameContentMapping = new HashMap<>(populateMetadata()); metadataFilenameContentMapping.put(metadataFilename4, metadataFilenameContentMapping.get(metadataFilename3)); diff --git a/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java b/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java index b4eac2c4548d5..299100b65a43e 100644 --- a/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java +++ b/server/src/test/java/org/opensearch/index/store/lockmanager/RemoteStoreMetadataLockManagerTests.java @@ -17,6 +17,7 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import java.util.Set; import junit.framework.TestCase; @@ -96,4 +97,26 @@ public void testIsAcquiredExceptionCase() { // metadata file is not passed durin FileLockInfo testLockInfo = FileLockInfo.getLockInfoBuilder().withAcquirerId(testAcquirerId).build(); assertThrows(IllegalArgumentException.class, () -> remoteStoreMetadataLockManager.isAcquired(testLockInfo)); } + + public void testFetchLocksEmpty() throws IOException { + when(lockDirectory.listFilesByPrefix("metadata")).thenReturn(Set.of()); + assertEquals(0, 
remoteStoreMetadataLockManager.fetchLockedMetadataFiles("metadata").size()); + } + + public void testFetchLocksNonEmpty() throws IOException { + String metadata1 = "metadata_1_2_3"; + String metadata2 = "metadata_4_5_6"; + when(lockDirectory.listFilesByPrefix("metadata")).thenReturn( + Set.of( + FileLockInfo.LockFileUtils.generateLockName(metadata1, "snapshot1"), + FileLockInfo.LockFileUtils.generateLockName(metadata2, "snapshot2") + ) + ); + assertEquals(Set.of(metadata1, metadata2), remoteStoreMetadataLockManager.fetchLockedMetadataFiles("metadata")); + } + + public void testFetchLocksException() throws IOException { + when(lockDirectory.listFilesByPrefix("metadata")).thenThrow(new IOException("Something went wrong")); + assertThrows(IOException.class, () -> remoteStoreMetadataLockManager.fetchLockedMetadataFiles("metadata")); + } } From 2001d5960133958f359f5713030bc279469d0070 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Fri, 26 Jan 2024 14:53:49 -0500 Subject: [PATCH 130/178] Bump Netty to 4.1.106.Final (#12034) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- CHANGELOG.md | 2 +- buildSrc/version.properties | 2 +- .../licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-buffer-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-common-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-common-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-handler-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.106.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-dns-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-dns-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-socks-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-socks-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.106.Final.jar.sha1 | 1 + .../repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 | 1 - .../repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 - 
.../licenses/netty-codec-http2-4.1.106.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 | 1 + .../repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 - .../repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.106.Final.jar.sha1 | 1 + .../netty-transport-classes-epoll-4.1.104.Final.jar.sha1 | 1 - .../netty-transport-classes-epoll-4.1.106.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.106.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.106.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 | 1 + .../transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 - .../transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-buffer-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-buffer-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-dns-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-dns-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-http-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-codec-http2-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-codec-http2-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-common-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-common-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-handler-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-handler-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-resolver-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-resolver-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 | 1 + .../licenses/netty-transport-4.1.104.Final.jar.sha1 | 1 - .../licenses/netty-transport-4.1.106.Final.jar.sha1 | 1 + .../netty-transport-native-unix-common-4.1.104.Final.jar.sha1 | 1 - .../netty-transport-native-unix-common-4.1.106.Final.jar.sha1 | 1 + 90 files changed, 46 insertions(+), 46 deletions(-) delete mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 create mode 100644 
modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 delete mode 100644 modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 create mode 100644 modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-codec-socks-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-http-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 create mode 100644 
plugins/repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-resolver-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.106.Final.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 create mode 100644 plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-codec-http-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-resolver-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-nio/licenses/netty-transport-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 create 
mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1
 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1
 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1
 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1
 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1
 delete mode 100644 plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1
 create mode 100644 plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index aa31819ffae97..b07373c0fd188 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -152,7 +152,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171))
 - Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271))
 - Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273))
-- Bump `netty` from 4.1.100.Final to 4.1.104.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775))
+- Bump `netty` from 4.1.100.Final to 4.1.106.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775), [#12034](https://github.com/opensearch-project/OpenSearch/pull/12034))
 - Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692))
 - Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861))
 - Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344))
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 8f8e445b04437..2dbb10a8231d9 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -26,7 +26,7 @@ jakarta_annotation = 1.3.5
 # when updating the JNA version, also update the version in buildSrc/build.gradle
 jna = 5.13.0
 
-netty = 4.1.104.Final
+netty = 4.1.106.Final
 joda = 2.12.2
 
 # project reactor
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1
deleted file mode 100644
index 30f215e47f8ad..0000000000000
--- a/modules/transport-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-756797174b94a3aee11ce83522473f3c18287a43
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1
new file mode 100644
index 0000000000000..1f170375e9347
--- /dev/null
+++ b/modules/transport-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1
@@ -0,0 +1 @@
+c5495ba59a627641b3a7c23f6bcb801874c7f7b0
\ No newline at end of file
diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1
b/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 
b/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 deleted file mode 100644 index 5cacaf11a29ce..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..2dab4bff2cc0e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +8fcca44ae16b98e15965093e7696832019fd6f27 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 64797bf11aedc..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..b4b977fdad7de --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +2da179bd95903f0fa73218b8f0d02690c0cfbc94 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 deleted file mode 100644 index 0232fc58f9357..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..005cc2388bd89 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +9d4b3315bb625ec2d73fa569fb6bce4589243d5e \ 
No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 deleted file mode 100644 index 3b6cd3524d978..0000000000000 --- a/plugins/repository-azure/licenses/netty-codec-socks-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5b126ceba61275f38297cacd5ea0cd6d3addee04 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-codec-socks-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-codec-socks-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..4fc3f5c43edb0 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-codec-socks-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +129f9bd6aa0cf28bfd7d45a8b6a598eed9c67702 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9d01e814971f2..0000000000000 --- a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -50a2d899a8f8a68daed1a9b6d7750184310cc45f \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..3dcfa7f26db06 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-handler-proxy-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +bffc88b7c56d7b553453e4244247a1b0ba1fdc8a \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 deleted file mode 100644 index 987b524aedc98..0000000000000 --- a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f1210e5856fecb9182d58c0d33fa6e946b344b40 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..0617c6999c586 --- /dev/null +++ b/plugins/repository-azure/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +3970474ed55aa1e3e8de5a5602c342c6b8155371 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 64797bf11aedc..0000000000000 --- 
a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..b4b977fdad7de --- /dev/null +++ b/plugins/repository-azure/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +2da179bd95903f0fa73218b8f0d02690c0cfbc94 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9110503f67304..0000000000000 --- a/plugins/repository-hdfs/licenses/netty-all-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d75246285e5fac6f6dad47e387ed4f46f36e521d \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 b/plugins/repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..f9affd7887093 --- /dev/null +++ b/plugins/repository-hdfs/licenses/netty-all-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +6ec5bd2be65d5529e58e9e482b747c1135b3736b \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 deleted file mode 100644 index 30f215e47f8ad..0000000000000 --- a/plugins/repository-s3/licenses/netty-buffer-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..1f170375e9347 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-buffer-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c5495ba59a627641b3a7c23f6bcb801874c7f7b0 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ 
b/plugins/repository-s3/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/plugins/repository-s3/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/plugins/repository-s3/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/plugins/repository-s3/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/plugins/repository-s3/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/plugins/repository-s3/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 deleted file mode 100644 index 5cacaf11a29ce..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..2dab4bff2cc0e --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +8fcca44ae16b98e15965093e7696832019fd6f27 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 deleted file mode 100644 index 522d85a3bf12e..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -700fdbabab44709b0eccffe8f91c4226a5787356 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..58b668b7e80a9 --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-classes-epoll-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c058d5c712e00e8560e519970b3d27747778b8f2 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 64797bf11aedc..0000000000000 --- a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d4da9f7237ac3ac292891e0b2d5364acbce128cf \ No newline at end of file diff --git a/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..b4b977fdad7de --- /dev/null +++ b/plugins/repository-s3/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +2da179bd95903f0fa73218b8f0d02690c0cfbc94 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 deleted file mode 100644 index 30f215e47f8ad..0000000000000 --- a/plugins/transport-nio/licenses/netty-buffer-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..1f170375e9347 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-buffer-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c5495ba59a627641b3a7c23f6bcb801874c7f7b0 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ 
b/plugins/transport-nio/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/plugins/transport-nio/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/plugins/transport-nio/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/plugins/transport-nio/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/plugins/transport-nio/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/plugins/transport-nio/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/plugins/transport-nio/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 deleted file mode 100644 index 5cacaf11a29ce..0000000000000 --- a/plugins/transport-nio/licenses/netty-transport-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-da7b263b6fedc5add70e78ee8927c8bd2b9bb589 \ No newline at end of file diff --git a/plugins/transport-nio/licenses/netty-transport-4.1.106.Final.jar.sha1 b/plugins/transport-nio/licenses/netty-transport-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..2dab4bff2cc0e --- /dev/null +++ b/plugins/transport-nio/licenses/netty-transport-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +8fcca44ae16b98e15965093e7696832019fd6f27 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 deleted file mode 100644 index 30f215e47f8ad..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -756797174b94a3aee11ce83522473f3c18287a43 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..1f170375e9347 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-buffer-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +c5495ba59a627641b3a7c23f6bcb801874c7f7b0 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 deleted file mode 100644 index 9ed9b896d4b4e..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f51fcfd3baac88b2c0b8dc715932ad5622d17429 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..a75ea81b7ee03 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +025171b63aa1e7a5fd8a7e4e660d6d3110241ea7 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 deleted file mode 100644 index 0232fc58f9357..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e237ce67ab230ed1ba749d6651b278333c21b3f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..005cc2388bd89 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-dns-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +9d4b3315bb625ec2d73fa569fb6bce4589243d5e \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 deleted file mode 100644 index 478e7cfba1470..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2db1556de1b8dc07695604bf51a0a133263ad63f \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 new file mode 
100644 index 0000000000000..7f5d1adbff740 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-http-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +21a07cdf0fc46b313fe2248f1275cdbdac0ba87b \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 deleted file mode 100644 index f0242709f34f7..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5bb757929f7c4d1bf12740a378a99643caaad1ac \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..c4a0c1fae8e24 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-codec-http2-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +41515e8c51eeaaddceabdb4f86fbc5dbfc25b70e \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 deleted file mode 100644 index 1b533eea3b3b3..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec5fc4a7c5475eb20805e14f7274aa28872b5ba1 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bda8b9376e992 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-common-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +132defb4689f41b51b483b7202b22b6e89fe35fd \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 deleted file mode 100644 index 70777be4dc636..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18c0e659950cdef5f12c36eccfa14cbd2ad2049d \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..749cc807bcce2 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-handler-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +874c970c4ff958b1140dde52bc17e6a9e7cde662 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 deleted file mode 100644 index d7c15af9312fe..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.104.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dfa4fe5c3a6eabb7af09902eb63266829876d8a2 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 new file mode 100644 index 0000000000000..bd160c07ad0ff --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-4.1.106.Final.jar.sha1 @@ -0,0 +1 @@ +e185ae573db04939215f94d6ba869758dcecbde9 \ No newline at 
end of file
diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1
deleted file mode 100644
index 987b524aedc98..0000000000000
--- a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.104.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f1210e5856fecb9182d58c0d33fa6e946b344b40
\ No newline at end of file
diff --git a/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1
new file mode 100644
index 0000000000000..0617c6999c586
--- /dev/null
+++ b/plugins/transport-reactor-netty4/licenses/netty-resolver-dns-4.1.106.Final.jar.sha1
@@ -0,0 +1 @@
+3970474ed55aa1e3e8de5a5602c342c6b8155371
\ No newline at end of file
diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1
deleted file mode 100644
index 5cacaf11a29ce..0000000000000
--- a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.104.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-da7b263b6fedc5add70e78ee8927c8bd2b9bb589
\ No newline at end of file
diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1
new file mode 100644
index 0000000000000..2dab4bff2cc0e
--- /dev/null
+++ b/plugins/transport-reactor-netty4/licenses/netty-transport-4.1.106.Final.jar.sha1
@@ -0,0 +1 @@
+8fcca44ae16b98e15965093e7696832019fd6f27
\ No newline at end of file
diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1
deleted file mode 100644
index 64797bf11aedc..0000000000000
--- a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.104.Final.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d4da9f7237ac3ac292891e0b2d5364acbce128cf
\ No newline at end of file
diff --git a/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1 b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1
new file mode 100644
index 0000000000000..b4b977fdad7de
--- /dev/null
+++ b/plugins/transport-reactor-netty4/licenses/netty-transport-native-unix-common-4.1.106.Final.jar.sha1
@@ -0,0 +1 @@
+2da179bd95903f0fa73218b8f0d02690c0cfbc94
\ No newline at end of file

From 92afd67c570a779cb33846c9359b0cbd6fe56753 Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Fri, 26 Jan 2024 14:54:51 -0500
Subject: [PATCH 131/178] Update Mockito to 5.10.0 (#12036)

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
---
 buildSrc/version.properties | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 2dbb10a8231d9..a7c0622e7d293 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -55,7 +55,7 @@ bouncycastle=1.76
 randomizedrunner = 2.7.1
 junit = 4.13.2
 hamcrest = 2.1
-mockito = 5.5.0
+mockito = 5.10.0
 objenesis = 3.2
 bytebuddy = 1.14.7
 
From 95b3ec36e192550dbe19b722e2d50c161beae103 Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Fri, 26 Jan 2024 21:43:05 -0500
Subject: [PATCH 132/178] Bump reactor-core and reactor-netty versions (#12042)

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
---
 CHANGELOG.md | 3 ++-
 buildSrc/version.properties | 4 ++--
 .../licenses/reactor-netty-core-1.1.13.jar.sha1 | 1 -
 .../licenses/reactor-netty-core-1.1.15.jar.sha1 | 1 +
 .../licenses/reactor-netty-http-1.1.13.jar.sha1 | 1 -
 .../licenses/reactor-netty-http-1.1.15.jar.sha1 | 1 +
 .../licenses/reactor-netty-core-1.1.13.jar.sha1 | 1 -
 .../licenses/reactor-netty-core-1.1.15.jar.sha1 | 1 +
 .../licenses/reactor-netty-http-1.1.13.jar.sha1 | 1 -
 .../licenses/reactor-netty-http-1.1.15.jar.sha1 | 1 +
 server/licenses/reactor-core-3.5.11.jar.sha1 | 1 -
 server/licenses/reactor-core-3.5.14.jar.sha1 | 1 +
 12 files changed, 9 insertions(+), 8 deletions(-)
 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.1.13.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/reactor-netty-core-1.1.15.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.1.13.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/reactor-netty-http-1.1.15.jar.sha1
 delete mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.13.jar.sha1
 create mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.15.jar.sha1
 delete mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.13.jar.sha1
 create mode 100644 plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.15.jar.sha1
 delete mode 100644 server/licenses/reactor-core-3.5.11.jar.sha1
 create mode 100644 server/licenses/reactor-core-3.5.14.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index aa31819ffae97..b07373c0fd188 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -152,7 +152,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171))
 - Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271))
 - Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273))
-- Bump `netty` from 4.1.100.Final to 4.1.104.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775))
+- Bump `netty` from 4.1.100.Final to 4.1.106.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775), [#12034](https://github.com/opensearch-project/OpenSearch/pull/12034))
 - Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692))
 - Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861))
 - Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344))
-- Bump `reactor-netty-core` from 1.1.12 to 1.1.13 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350))
([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961)) +- Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index a7c0622e7d293..a888640772a42 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -30,8 +30,8 @@ netty = 4.1.106.Final joda = 2.12.2 # project reactor -reactor_netty = 1.1.13 -reactor = 3.5.11 +reactor_netty = 1.1.15 +reactor = 3.5.14 # client dependencies httpclient5 = 5.2.1 diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.1.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.1.13.jar.sha1 deleted file mode 100644 index 5eaf96739ed72..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-core-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -faea23e582978a34f6a932b81e86206ec2314990 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-core-1.1.15.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-core-1.1.15.jar.sha1 new file mode 100644 index 0000000000000..c30a99a2338b4 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-core-1.1.15.jar.sha1 @@ -0,0 +1 @@ +3221d405ad55a573cf29875a8244a4217cf07185 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.13.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.13.jar.sha1 deleted file mode 100644 index 091125169c696..0000000000000 --- a/plugins/repository-azure/licenses/reactor-netty-http-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5af7bc746050d080891a5446cca2c96a0c51d03 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-netty-http-1.1.15.jar.sha1 b/plugins/repository-azure/licenses/reactor-netty-http-1.1.15.jar.sha1 new file mode 100644 index 0000000000000..ab3171cd02b73 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-netty-http-1.1.15.jar.sha1 @@ -0,0 +1 @@ +c79756fa2dfc28ac81fc9d23a14b17c656c3e560 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.13.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.13.jar.sha1 deleted file mode 100644 index 5eaf96739ed72..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -faea23e582978a34f6a932b81e86206ec2314990 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.15.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.15.jar.sha1 new file mode 100644 index 0000000000000..c30a99a2338b4 --- /dev/null +++ b/plugins/transport-reactor-netty4/licenses/reactor-netty-core-1.1.15.jar.sha1 @@ -0,0 +1 @@ +3221d405ad55a573cf29875a8244a4217cf07185 \ No newline at end of file diff --git a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.13.jar.sha1 b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.13.jar.sha1 deleted file mode 100644 index 091125169c696..0000000000000 --- a/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c5af7bc746050d080891a5446cca2c96a0c51d03 \ No newline at end of file diff --git 
new file mode 100644
index 0000000000000..ab3171cd02b73
--- /dev/null
+++ b/plugins/transport-reactor-netty4/licenses/reactor-netty-http-1.1.15.jar.sha1
@@ -0,0 +1 @@
+c79756fa2dfc28ac81fc9d23a14b17c656c3e560
\ No newline at end of file
diff --git a/server/licenses/reactor-core-3.5.11.jar.sha1 b/server/licenses/reactor-core-3.5.11.jar.sha1
deleted file mode 100644
index e5ffdbc8a7840..0000000000000
--- a/server/licenses/reactor-core-3.5.11.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-db2299757f562261eb775d13658e86ff06f91e8a
\ No newline at end of file
diff --git a/server/licenses/reactor-core-3.5.14.jar.sha1 b/server/licenses/reactor-core-3.5.14.jar.sha1
new file mode 100644
index 0000000000000..3b58e7a68bade
--- /dev/null
+++ b/server/licenses/reactor-core-3.5.14.jar.sha1
@@ -0,0 +1 @@
+6e0c97c2e78273a00fd4ed38016b19ff3c6de59e
\ No newline at end of file

From 4323af1973c6c19b7ab35754f3ff1d6a5437dce9 Mon Sep 17 00:00:00 2001
From: Gaurav Bafna <85113518+gbbafna@users.noreply.github.com>
Date: Mon, 29 Jan 2024 10:02:20 +0530
Subject: [PATCH 133/178] Adding configurable connection count setting for S3
 Sync Client (#12028)

* Adding configurable connection count setting for S3 Sync Client

Signed-off-by: Gaurav Bafna <gbbafna@amazon.com>

* Address PR Comments

Signed-off-by: Gaurav Bafna <gbbafna@amazon.com>

* Increase number of connections to 500

Signed-off-by: Gaurav Bafna <gbbafna@amazon.com>

---------

Signed-off-by: Gaurav Bafna <gbbafna@amazon.com>
---
 CHANGELOG.md | 1 +
 .../repositories/s3/S3ClientSettings.java | 20 +++++++++++++++++--
 .../opensearch/repositories/s3/S3Service.java | 2 ++
 .../s3/S3ClientSettingsTests.java | 2 ++
 4 files changed, 23 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2cd6863092c5b..78b1e0126b50c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -15,6 +15,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - GHA to verify checklist items completion in PR descriptions ([#10800](https://github.com/opensearch-project/OpenSearch/pull/10800))
 - Allow to pass the list settings through environment variables (like [], ["a", "b", "c"], ...) ([#10625](https://github.com/opensearch-project/OpenSearch/pull/10625))
 - [Admission Control] Integrate CPU AC with ResourceUsageCollector and add CPU AC stats to nodes/stats ([#10887](https://github.com/opensearch-project/OpenSearch/pull/10887))
+- [S3 Repository] Add setting to control connection count for sync client ([#12028](https://github.com/opensearch-project/OpenSearch/pull/12028))
 
 ### Dependencies
 - Bump `log4j-core` from 2.18.0 to 2.19.0
diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java
index 4fda0ee95a3ec..e44f408e6dd12 100644
--- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java
+++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3ClientSettings.java
@@ -201,6 +201,12 @@ final class S3ClientSettings {
         key -> Setting.intSetting(key, 500, Property.NodeScope)
     );
 
+    static final Setting.AffixSetting<Integer> MAX_SYNC_CONNECTIONS_SETTING = Setting.affixKeySetting(
+        PREFIX,
+        "max_sync_connections",
+        key -> Setting.intSetting(key, 500, Property.NodeScope)
+    );
+
     /** Connection acquisition timeout for new connections to S3. */
     static final Setting.AffixSetting<TimeValue> CONNECTION_ACQUISITION_TIMEOUT = Setting.affixKeySetting(
         PREFIX,
@@ -284,10 +290,13 @@ final class S3ClientSettings {
     /** The connection TTL for the s3 client */
     final int connectionTTLMillis;
 
-    /** The max number of connections for the s3 client */
+    /** The max number of connections for the s3 async client */
     final int maxConnections;
 
-    /** The connnection acquisition timeout for the s3 async client */
+    /** The max number of connections for the s3 sync client */
+    final int maxSyncConnections;
+
+    /** The connection acquisition timeout for the s3 sync and async client */
     final int connectionAcquisitionTimeoutMillis;
 
     /** The number of retries to use for the s3 client. */
@@ -318,6 +327,7 @@ private S3ClientSettings(
         int connectionTimeoutMillis,
         int connectionTTLMillis,
         int maxConnections,
+        int maxSyncConnections,
         int connectionAcquisitionTimeoutMillis,
         int maxRetries,
         boolean throttleRetries,
@@ -336,6 +346,7 @@ private S3ClientSettings(
         this.connectionTimeoutMillis = connectionTimeoutMillis;
         this.connectionTTLMillis = connectionTTLMillis;
         this.maxConnections = maxConnections;
+        this.maxSyncConnections = maxSyncConnections;
         this.connectionAcquisitionTimeoutMillis = connectionAcquisitionTimeoutMillis;
         this.maxRetries = maxRetries;
         this.throttleRetries = throttleRetries;
@@ -386,6 +397,9 @@ S3ClientSettings refine(Settings repositorySettings) {
             ).millis()
         );
         final int newMaxConnections = Math.toIntExact(getRepoSettingOrDefault(MAX_CONNECTIONS_SETTING, normalizedSettings, maxConnections));
+        final int newMaxSyncConnections = Math.toIntExact(
+            getRepoSettingOrDefault(MAX_SYNC_CONNECTIONS_SETTING, normalizedSettings, maxSyncConnections)
+        );
         final int newMaxRetries = getRepoSettingOrDefault(MAX_RETRIES_SETTING, normalizedSettings, maxRetries);
         final boolean newThrottleRetries = getRepoSettingOrDefault(USE_THROTTLE_RETRIES_SETTING, normalizedSettings, throttleRetries);
         final boolean newPathStyleAccess = getRepoSettingOrDefault(USE_PATH_STYLE_ACCESS, normalizedSettings, pathStyleAccess);
@@ -433,6 +447,7 @@ S3ClientSettings refine(Settings repositorySettings) {
             newConnectionTimeoutMillis,
             newConnectionTTLMillis,
             newMaxConnections,
+            newMaxSyncConnections,
             newConnectionAcquisitionTimeoutMillis,
             newMaxRetries,
             newThrottleRetries,
@@ -563,6 +578,7 @@ static S3ClientSettings getClientSettings(final Settings settings, final String
             Math.toIntExact(getConfigValue(settings, clientName, CONNECTION_TIMEOUT_SETTING).millis()),
             Math.toIntExact(getConfigValue(settings, clientName, CONNECTION_TTL_SETTING).millis()),
             Math.toIntExact(getConfigValue(settings, clientName, MAX_CONNECTIONS_SETTING)),
+            Math.toIntExact(getConfigValue(settings, clientName, MAX_SYNC_CONNECTIONS_SETTING)),
             Math.toIntExact(getConfigValue(settings, clientName, CONNECTION_ACQUISITION_TIMEOUT).millis()),
             getConfigValue(settings, clientName, MAX_RETRIES_SETTING),
             getConfigValue(settings, clientName, USE_THROTTLE_RETRIES_SETTING),
diff --git a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java
index 24387fb98a425..fe81da31432f4 100644
--- a/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java
+++ b/plugins/repository-s3/src/main/java/org/opensearch/repositories/s3/S3Service.java
@@ -279,6 +279,8 @@ protected PasswordAuthentication getPasswordAuthentication() {
         }
 
         clientBuilder.socketTimeout(Duration.ofMillis(clientSettings.readTimeoutMillis));
+        clientBuilder.maxConnections(clientSettings.maxSyncConnections);
+        clientBuilder.connectionAcquisitionTimeout(Duration.ofMillis(clientSettings.connectionAcquisitionTimeoutMillis));
 
         return clientBuilder;
     }
diff --git a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java
index f27c8387b6e45..b47749553aeba 100644
--- a/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java
+++ b/plugins/repository-s3/src/test/java/org/opensearch/repositories/s3/S3ClientSettingsTests.java
@@ -74,6 +74,8 @@ public void testThereIsADefaultClientByDefault() {
From 1ec5305122b8c1914f65d9c5f93186bc0b81d275 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 29 Jan 2024 09:12:51 -0500
Subject: [PATCH 134/178] Bump com.diffplug.spotless from 6.24.0 to 6.25.0 (#12055)

* Bump com.diffplug.spotless from 6.24.0 to 6.25.0

Bumps com.diffplug.spotless from 6.24.0 to 6.25.0.

---
updated-dependencies:
- dependency-name: com.diffplug.spotless
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update changelog

Signed-off-by: dependabot[bot] <support@github.com>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com>
---
 CHANGELOG.md | 2 +-
 build.gradle | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 78b1e0126b50c..26e1373151fc0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -179,7 +179,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.2.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886), [#11963](https://github.com/opensearch-project/OpenSearch/pull/11963))
 - Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794))
 - Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960))
-- Bump `com.diffplug.spotless` from 6.23.2 to 6.24.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962))
+- Bump `com.diffplug.spotless` from 6.23.2 to 6.25.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962), [#12055](https://github.com/opensearch-project/OpenSearch/pull/12055))
 - Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961))
 - Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))

diff --git a/build.gradle b/build.gradle
index 4d29ca8664af3..e082967255b10 100644
--- a/build.gradle
+++ b/build.gradle
@@ -54,7 +54,7 @@ plugins {
   id 'lifecycle-base'
   id 'opensearch.docker-support'
   id 'opensearch.global-build-info'
-  id "com.diffplug.spotless" version "6.24.0" apply false
+  id "com.diffplug.spotless" version "6.25.0" apply false
   id "org.gradle.test-retry" version "1.5.4" apply false
   id "test-report-aggregation"
   id 'jacoco-report-aggregation'

From c9ddaeffbdba8fd0a4e5c5d4c8949f3ca34f3839 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 29 Jan 2024 09:53:54 -0500
Subject: [PATCH 135/178] Bump com.google.http-client:google-http-client-jackson2 from 1.43.3 to 1.44.1 in /plugins/repository-gcs (#12059)

* Bump com.google.http-client:google-http-client-jackson2

Bumps
[com.google.http-client:google-http-client-jackson2](https://github.com/googleapis/google-http-java-client) from 1.43.3 to 1.44.1.
- [Release notes](https://github.com/googleapis/google-http-java-client/releases)
- [Changelog](https://github.com/googleapis/google-http-java-client/blob/main/CHANGELOG.md)
- [Commits](https://github.com/googleapis/google-http-java-client/compare/v1.43.3...v1.44.1)

---
updated-dependencies:
- dependency-name: com.google.http-client:google-http-client-jackson2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>

* Updating SHAs

Signed-off-by: dependabot[bot] <support@github.com>

* Update changelog

Signed-off-by: dependabot[bot] <support@github.com>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com>
---
 CHANGELOG.md | 1 +
 plugins/repository-gcs/build.gradle | 2 +-
 .../licenses/google-http-client-jackson2-1.43.3.jar.sha1 | 1 -
 .../licenses/google-http-client-jackson2-1.44.1.jar.sha1 | 1 +
 4 files changed, 3 insertions(+), 2 deletions(-)
 delete mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson2-1.43.3.jar.sha1
 create mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson2-1.44.1.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 26e1373151fc0..0ce761fde5b91 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -182,6 +182,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `com.diffplug.spotless` from 6.23.2 to 6.25.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962), [#12055](https://github.com/opensearch-project/OpenSearch/pull/12055))
 - Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961))
 - Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))
+- Bump `com.google.http-client:google-http-client-jackson2` from 1.43.3 to 1.44.1 ([#12059](https://github.com/opensearch-project/OpenSearch/pull/12059))

 ### Changed
 - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))

diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle
index e0651da8d39c8..2ce67b1f25a80 100644
--- a/plugins/repository-gcs/build.gradle
+++ b/plugins/repository-gcs/build.gradle
@@ -78,7 +78,7 @@ dependencies {
   api 'com.google.http-client:google-http-client:1.43.3'
   api 'com.google.http-client:google-http-client-appengine:1.43.3'
   api 'com.google.http-client:google-http-client-gson:1.43.3'
-  api 'com.google.http-client:google-http-client-jackson2:1.43.3'
+  api 'com.google.http-client:google-http-client-jackson2:1.44.1'

   api 'com.google.oauth-client:google-oauth-client:1.34.1'

diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.43.3.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.43.3.jar.sha1
deleted file mode 100644
index 8380b9fb770b5..0000000000000
--- a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.43.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-689da86469d19a01c726c8c24477b95c8a834bbe
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.44.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.44.1.jar.sha1
new file mode 100644
index 0000000000000..4472ffbbebe1c
--- /dev/null
+++ b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.44.1.jar.sha1
@@ -0,0 +1 @@
+3f1947de0fd9eb250af16abe6103c11e68d11635
\ No newline at end of file

From af57da4cb7f31840c9abc07ebb4e90a9fa55b0c7 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 29 Jan 2024 10:35:12 -0500
Subject: [PATCH 136/178] Bump com.gradle.enterprise from 3.16.1 to 3.16.2 (#12056)

* Bump com.gradle.enterprise from 3.16.1 to 3.16.2

Bumps com.gradle.enterprise from 3.16.1 to 3.16.2.

---
updated-dependencies:
- dependency-name: com.gradle.enterprise
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update changelog

Signed-off-by: dependabot[bot] <support@github.com>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com>
---
 CHANGELOG.md | 2 +-
 settings.gradle | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0ce761fde5b91..35a3e59c5470b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -158,7 +158,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `com.squareup.okhttp3:okhttp` from 4.11.0 to 4.12.0 ([#10861](https://github.com/opensearch-project/OpenSearch/pull/10861))
 - Bump `org.apache.commons:commons-text` from 1.10.0 to 1.11.0 ([#11344](https://github.com/opensearch-project/OpenSearch/pull/11344))
 - Bump `reactor-netty-core` from 1.1.12 to 1.1.15 ([#11350](https://github.com/opensearch-project/OpenSearch/pull/11350)), ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))
-- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.1 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629))
+- Bump `com.gradle.enterprise` from 3.14.1 to 3.16.2 ([#11339](https://github.com/opensearch-project/OpenSearch/pull/11339), [#11629](https://github.com/opensearch-project/OpenSearch/pull/11629), [#12056](https://github.com/opensearch-project/OpenSearch/pull/12056))
 - Bump `actions/setup-java` from 3 to 4 ([#11447](https://github.com/opensearch-project/OpenSearch/pull/11447))
 - Bump `commons-net:commons-net` from 3.9.0 to 3.10.0 ([#11450](https://github.com/opensearch-project/OpenSearch/pull/11450))
 - Bump `org.apache.maven:maven-model` from 3.9.4 to 3.9.6 ([#11445](https://github.com/opensearch-project/OpenSearch/pull/11445))

diff --git a/settings.gradle b/settings.gradle
index 24ab4a7a22237..8fbf32504215b 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -10,7 +10,7 @@
  */

 plugins {
-  id "com.gradle.enterprise" version "3.16.1"
+  id "com.gradle.enterprise" version "3.16.2"
 }

 ext.disableBuildCache = hasProperty('DISABLE_BUILD_CACHE') || System.getenv().containsKey('DISABLE_BUILD_CACHE')

From 6012504c2d8577d94d5c763c66823cc33f18b539 Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Mon, 29 Jan 2024 12:20:44 -0500
Subject: [PATCH 137/178] Update to Apache Lucene 9.9.2 (#12063)

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
---
 CHANGELOG.md | 2 +-
 buildSrc/version.properties | 2 +-
 libs/core/licenses/lucene-core-9.9.1.jar.sha1 | 1 -
 libs/core/licenses/lucene-core-9.9.2.jar.sha1 | 1 +
 libs/core/src/main/java/org/opensearch/Version.java | 2 +-
 .../lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 | 1 -
 .../lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1 | 1 +
 .../analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 | 1 -
 .../analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1 | 1 +
 .../licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 | 1 -
 .../licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1 | 1 +
 .../analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 | 1 -
 .../analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1 | 1 +
 .../licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 | 1 -
 .../licenses/lucene-analysis-phonetic-9.9.2.jar.sha1 | 1 +
 .../licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 | 1 -
 .../licenses/lucene-analysis-smartcn-9.9.2.jar.sha1 | 1 +
 .../licenses/lucene-analysis-stempel-9.9.1.jar.sha1 | 1 -
 .../licenses/lucene-analysis-stempel-9.9.2.jar.sha1 | 1 +
 .../licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 | 1 -
 .../licenses/lucene-analysis-morfologik-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-analysis-common-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-analysis-common-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-backward-codecs-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-core-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-core-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-grouping-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-grouping-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-highlighter-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-highlighter-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-join-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-join-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-memory-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-memory-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-misc-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-misc-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-queries-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-queries-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-queryparser-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-queryparser-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-sandbox-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-sandbox-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-spatial-extras-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-spatial3d-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-spatial3d-9.9.2.jar.sha1 | 1 +
 server/licenses/lucene-suggest-9.9.1.jar.sha1 | 1 -
 server/licenses/lucene-suggest-9.9.2.jar.sha1 | 1 +
 49 files changed, 26 insertions(+), 26 deletions(-)
 delete mode 100644 libs/core/licenses/lucene-core-9.9.1.jar.sha1
 create mode 100644 libs/core/licenses/lucene-core-9.9.2.jar.sha1
 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1
 create mode 100644 modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1
 delete mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1
 create mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1
 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1
 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1
 delete mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1
 create mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1
 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1
 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1
 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1
 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1
 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1
 create mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1
 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1
 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-analysis-common-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-analysis-common-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-backward-codecs-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-backward-codecs-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-core-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-core-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-grouping-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-grouping-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-highlighter-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-highlighter-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-join-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-join-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-memory-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-memory-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-misc-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-misc-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-queries-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-queries-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-queryparser-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-queryparser-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-sandbox-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-sandbox-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-spatial-extras-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-spatial-extras-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-spatial3d-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-spatial3d-9.9.2.jar.sha1
 delete mode 100644 server/licenses/lucene-suggest-9.9.1.jar.sha1
 create mode 100644 server/licenses/lucene-suggest-9.9.2.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 35a3e59c5470b..f2c0229e8c7fc 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -175,7 +175,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `com.maxmind.db:maxmind-db` from 3.0.0 to 3.1.0 ([#11693](https://github.com/opensearch-project/OpenSearch/pull/11693))
 - Bump `net.java.dev.jna:jna` from 5.13.0 to 5.14.0 ([#11798](https://github.com/opensearch-project/OpenSearch/pull/11798))
 - Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887))
-- Bump `Lucene` from 9.8.0 to 9.9.1 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421))
+- Bump `Lucene` from 9.8.0 to 9.9.2 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)), ([#12063](https://github.com/opensearch-project/OpenSearch/pull/12063))
 - Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.2.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886), [#11963](https://github.com/opensearch-project/OpenSearch/pull/11963))
 - Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794))
 - Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960))

diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index a888640772a42..95ae1ddb578a1 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -1,5 +1,5 @@
 opensearch = 3.0.0
-lucene = 9.9.1
+lucene = 9.9.2

 bundled_jdk_vendor = adoptium
 bundled_jdk = 21.0.2+13

diff --git a/libs/core/licenses/lucene-core-9.9.1.jar.sha1 b/libs/core/licenses/lucene-core-9.9.1.jar.sha1
deleted file mode 100644
index ae596196d9e6a..0000000000000
--- a/libs/core/licenses/lucene-core-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-55249fa9a0ed321adcf8283c6f3b649a6812b0a9
\ No newline at end of file
diff --git a/libs/core/licenses/lucene-core-9.9.2.jar.sha1 b/libs/core/licenses/lucene-core-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..2d03d69369b9f
--- /dev/null
+++ b/libs/core/licenses/lucene-core-9.9.2.jar.sha1
@@ -0,0 +1 @@
+7699f80220fc80b08413902560904623b88beb9f
\ No newline at end of file
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index 6a92993f5dd42..75f9a0dec3e12 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -99,7 +99,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0);
     public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0);
     public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_1);
-    public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_1);
+    public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_2);
     public static final Version CURRENT = V_3_0_0;

     public static Version fromId(int id) {
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1
deleted file mode 100644
index 402cc36ba3d68..0000000000000
--- a/modules/lang-expression/licenses/lucene-expressions-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-1782a69d0e83af9cc3c65db0dcd2e7e7c1e5f90e
\ No newline at end of file
diff --git a/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..e073455415e24
--- /dev/null
+++ b/modules/lang-expression/licenses/lucene-expressions-9.9.2.jar.sha1
@@ -0,0 +1 @@
+045df3828b6460c032a6551040e31ea432b0aad6
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1
deleted file mode 100644
index dde9b7c100dc7..0000000000000
--- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-147cb42a90a29501d9ca6094ea0db1d213f3076a
\ No newline at end of file
diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..b318a2d89db7d
--- /dev/null
+++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.9.2.jar.sha1
@@ -0,0 +1 @@
+f7e549fdac07140f4cd379a0f517c38434165e95
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1
deleted file mode 100644
index b70a22e9db096..0000000000000
--- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b034dd3a975763e083c7e11b5d0f7d516ab72590
\ No newline at end of file
diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..87510efa881bc
--- /dev/null
+++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.9.2.jar.sha1
@@ -0,0 +1 @@
+eb19738fd3ca85071fef96978a056a7c94d8e793
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1
deleted file mode 100644
index 323f165c62790..0000000000000
--- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c405f2f7d0fc127d88dfbadd753469b2028fdf52
\ No newline at end of file
diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..fb3746eb27840
--- /dev/null
+++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.9.2.jar.sha1
@@ -0,0 +1 @@
+7e83a83741155ececf9193a4f967e570e170236d
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1
deleted file mode 100644
index dd659ddf4de95..0000000000000
--- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-970e5775876c2d7e1b9af7421a4b17d96f63faf4
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..e88299f106bb2
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.9.2.jar.sha1
@@ -0,0 +1 @@
+dabdea96c7a6d00363b0093a580e7d783efa69a4
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1
deleted file mode 100644
index ed0e81d8f1f75..0000000000000
--- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2421e5238e9b8484929291744d709dd743c01da1
\ No newline at end of file
diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..05dd3328b032d
--- /dev/null
+++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.9.2.jar.sha1
@@ -0,0 +1 @@
+fb551d9ae6a1bf12bc90a4d26dd8fa3eefac8cb9
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1
deleted file mode 100644
index fd8e000088180..0000000000000
--- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-a23e7de4cd9ae7af285c89dc1c55e0ac3f157fd3
\ No newline at end of file
diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..b47a1e4052407
--- /dev/null
+++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.9.2.jar.sha1
@@ -0,0 +1 @@
+0d495b478861b2d29e0a58d273ca0e6e755292e9
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1
deleted file mode 100644
index d0e7a3b0c751c..0000000000000
--- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8d9bce1ea51db279878c51091dd9aefc7b335da4
\ No newline at end of file
diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..b7d54cb230445
--- /dev/null
+++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.9.2.jar.sha1
@@ -0,0 +1 @@
+9958d813d4cfdd890b4611c679ed36775480fa0d
\ No newline at end of file
diff --git a/server/licenses/lucene-analysis-common-9.9.1.jar.sha1 b/server/licenses/lucene-analysis-common-9.9.1.jar.sha1
deleted file mode 100644
index c9e6120da7497..0000000000000
--- a/server/licenses/lucene-analysis-common-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-24c8401b530308f9568eb7b408c2029c63f564c6
\ No newline at end of file
diff --git a/server/licenses/lucene-analysis-common-9.9.2.jar.sha1 b/server/licenses/lucene-analysis-common-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..6eefe1b002fde
--- /dev/null
+++ b/server/licenses/lucene-analysis-common-9.9.2.jar.sha1
@@ -0,0 +1 @@
+2a4712ee9d87e0f9942a81195dbd5223dbeaa541
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1 b/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1
deleted file mode 100644
index 69ecf6aa68200..0000000000000
--- a/server/licenses/lucene-backward-codecs-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-11c46007366bb037be7d271ab0a5849b1d544662
\ No newline at end of file
diff --git a/server/licenses/lucene-backward-codecs-9.9.2.jar.sha1 b/server/licenses/lucene-backward-codecs-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..377acc22a1d6b
--- /dev/null
+++ b/server/licenses/lucene-backward-codecs-9.9.2.jar.sha1
@@ -0,0 +1 @@
+b53707366cf2891bb301f85a44c5108fc2d5d1b5
\ No newline at end of file
diff --git a/server/licenses/lucene-core-9.9.1.jar.sha1 b/server/licenses/lucene-core-9.9.1.jar.sha1
deleted file mode 100644
index ae596196d9e6a..0000000000000
--- a/server/licenses/lucene-core-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-55249fa9a0ed321adcf8283c6f3b649a6812b0a9
\ No newline at end of file
diff --git a/server/licenses/lucene-core-9.9.2.jar.sha1 b/server/licenses/lucene-core-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..2d03d69369b9f
--- /dev/null
+++ b/server/licenses/lucene-core-9.9.2.jar.sha1
@@ -0,0 +1 @@
+7699f80220fc80b08413902560904623b88beb9f
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-9.9.1.jar.sha1 b/server/licenses/lucene-grouping-9.9.1.jar.sha1
deleted file mode 100644
index e7df056400661..0000000000000
--- a/server/licenses/lucene-grouping-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2f2785e17c5c823cc8f41a7ddb4647aaca8ee773
\ No newline at end of file
diff --git a/server/licenses/lucene-grouping-9.9.2.jar.sha1 b/server/licenses/lucene-grouping-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..1e321d751bff4
--- /dev/null
+++ b/server/licenses/lucene-grouping-9.9.2.jar.sha1
@@ -0,0 +1 @@
+72b6c47fd80933d259859d38325e3e020c8e017b
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-9.9.1.jar.sha1 b/server/licenses/lucene-highlighter-9.9.1.jar.sha1
deleted file mode 100644
index 828c7294aa586..0000000000000
--- a/server/licenses/lucene-highlighter-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-30928513461bf79a5cb057e84da7d34a1e53227d
\ No newline at end of file
diff --git a/server/licenses/lucene-highlighter-9.9.2.jar.sha1 b/server/licenses/lucene-highlighter-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..d5c2e2a5504f2
--- /dev/null
+++ b/server/licenses/lucene-highlighter-9.9.2.jar.sha1
@@ -0,0 +1 @@
+0968d74d6794af65798819304f10ceb947080332
\ No newline at end of file
diff --git a/server/licenses/lucene-join-9.9.1.jar.sha1 b/server/licenses/lucene-join-9.9.1.jar.sha1
deleted file mode 100644
index 34b44ca8c6ad5..0000000000000
--- a/server/licenses/lucene-join-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b9c8cc99632280148f92b4c0a64111c482d5d0ac
\ No newline at end of file
diff --git a/server/licenses/lucene-join-9.9.2.jar.sha1 b/server/licenses/lucene-join-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..60da109fb5c83
--- /dev/null
+++ b/server/licenses/lucene-join-9.9.2.jar.sha1
@@ -0,0 +1 @@
+902e4b65686e29c1489a7cdf43ed2ce3b7fe8ace
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-9.9.1.jar.sha1 b/server/licenses/lucene-memory-9.9.1.jar.sha1
deleted file mode 100644
index b75fba4c331e9..0000000000000
--- a/server/licenses/lucene-memory-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49f820b1b321860fa42a4f7583e8ed8f77b9c1c2
\ No newline at end of file
diff --git a/server/licenses/lucene-memory-9.9.2.jar.sha1 b/server/licenses/lucene-memory-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..bebb36d7ed684
--- /dev/null
+++ b/server/licenses/lucene-memory-9.9.2.jar.sha1
@@ -0,0 +1 @@
+08175beb4135c23e6918c822381cd51cd2e118a8
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-9.9.1.jar.sha1 b/server/licenses/lucene-misc-9.9.1.jar.sha1
deleted file mode 100644
index f1e1e056004e9..0000000000000
--- a/server/licenses/lucene-misc-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-db7c30217602dfcda394a4d0f0a9e68140d385a6
\ No newline at end of file
diff --git a/server/licenses/lucene-misc-9.9.2.jar.sha1 b/server/licenses/lucene-misc-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..15f572ec715bf
--- /dev/null
+++ b/server/licenses/lucene-misc-9.9.2.jar.sha1
@@ -0,0 +1 @@
+39b015aa603db42b55f0833193bdfc75d38842ba
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-9.9.1.jar.sha1 b/server/licenses/lucene-queries-9.9.1.jar.sha1
deleted file mode 100644
index 888b9b4a05ec8..0000000000000
--- a/server/licenses/lucene-queries-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d157547bd24edc8e9d9d59c273107dc3ac5fde5e
\ No newline at end of file
diff --git a/server/licenses/lucene-queries-9.9.2.jar.sha1 b/server/licenses/lucene-queries-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..9a94e40b69e85
--- /dev/null
+++ b/server/licenses/lucene-queries-9.9.2.jar.sha1
@@ -0,0 +1 @@
+671252594e15280818f56d4b369bc48c0ab00dac
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-9.9.1.jar.sha1 b/server/licenses/lucene-queryparser-9.9.1.jar.sha1
deleted file mode 100644
index 1ce8a069a0f4e..0000000000000
--- a/server/licenses/lucene-queryparser-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-12d844fe224f6f97c510ac20d68903ed7f626f6c
\ No newline at end of file
diff --git a/server/licenses/lucene-queryparser-9.9.2.jar.sha1 b/server/licenses/lucene-queryparser-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..f23292999ae6a
--- /dev/null
+++ b/server/licenses/lucene-queryparser-9.9.2.jar.sha1
@@ -0,0 +1 @@
+7cf8dea63ab511ea9a322916198c3abd0402c8b2
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-9.9.1.jar.sha1 b/server/licenses/lucene-sandbox-9.9.1.jar.sha1
deleted file mode 100644
index 14fd86dadc404..0000000000000
--- a/server/licenses/lucene-sandbox-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-272e588fd3d8c0a401b28a1ac715f27044bf62ec
\ No newline at end of file
diff --git a/server/licenses/lucene-sandbox-9.9.2.jar.sha1 b/server/licenses/lucene-sandbox-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..a30f00d72a689
--- /dev/null
+++ b/server/licenses/lucene-sandbox-9.9.2.jar.sha1
@@ -0,0 +1 @@
+4e2bdc6a5b65dd15b78477cf2c49cf7b707ba757
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1 b/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1
deleted file mode 100644
index 0efd5a7595bfe..0000000000000
--- a/server/licenses/lucene-spatial-extras-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e066432e7ab02b2a4914f989bcd8c44adbf340ad
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial-extras-9.9.2.jar.sha1 b/server/licenses/lucene-spatial-extras-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..dafeee401659a
--- /dev/null
+++ b/server/licenses/lucene-spatial-extras-9.9.2.jar.sha1
@@ -0,0 +1 @@
+2693d4d4d3b9a0bf5b632ccbb01132eade1c4e14
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-9.9.1.jar.sha1 b/server/licenses/lucene-spatial3d-9.9.1.jar.sha1
deleted file mode 100644
index 7f06466e4c721..0000000000000
--- a/server/licenses/lucene-spatial3d-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fa54c9b962778e28ebc0efb9f75297781350361a
\ No newline at end of file
diff --git a/server/licenses/lucene-spatial3d-9.9.2.jar.sha1 b/server/licenses/lucene-spatial3d-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..47a1c05187098
--- /dev/null
+++ b/server/licenses/lucene-spatial3d-9.9.2.jar.sha1
@@ -0,0 +1 @@
+61866a44eafce499ebbc20bcb75c8b8a60a5449b
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-9.9.1.jar.sha1 b/server/licenses/lucene-suggest-9.9.1.jar.sha1
deleted file mode 100644
index 06732480d1b6c..0000000000000
--- a/server/licenses/lucene-suggest-9.9.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-9554de5b22ae7483b344b94a9a956960b7a5d49c
\ No newline at end of file
diff --git a/server/licenses/lucene-suggest-9.9.2.jar.sha1 b/server/licenses/lucene-suggest-9.9.2.jar.sha1
new file mode 100644
index 0000000000000..115983c932983
--- /dev/null
+++ b/server/licenses/lucene-suggest-9.9.2.jar.sha1
@@ -0,0 +1 @@
+352ad5296f48a482e2b65bb7d5d6ba2977bb035e
\ No newline at end of file

From 0cd06fa436d9f52009d868fb253318def652a600 Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Mon, 29 Jan 2024 15:00:36 -0500
Subject: [PATCH 138/178] Update Apache Lucene version for 2.12.0 release (#12072)

Signed-off-by: Andriy Redko <andriy.redko@aiven.io>
---
 libs/core/src/main/java/org/opensearch/Version.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index 75f9a0dec3e12..307da89c18d48 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -98,7 +98,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_2_11_0 = new Version(2110099, org.apache.lucene.util.Version.LUCENE_9_7_0);
     public static final Version V_2_11_1 = new Version(2110199, org.apache.lucene.util.Version.LUCENE_9_7_0);
     public static final Version V_2_11_2 = new Version(2110299, org.apache.lucene.util.Version.LUCENE_9_7_0);
-    public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_1);
+    public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_2);
     public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_9_2);
     public static final Version CURRENT = V_3_0_0;
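An aside on the packed ids in the `Version` constants above: the integer encodes major/minor/revision/build, which is why the two Lucene patches only swap the Lucene constant and never touch the id itself. A small decoding sketch (the arithmetic is illustrative, based on the usual OpenSearch id convention):

    // Decode 2120099 -> version 2.12.0, build 99 (the V_2_12_0 id above).
    int id = 2120099;
    int major = id / 1000000;            // 2
    int minor = (id % 1000000) / 10000;  // 12
    int revision = (id % 10000) / 100;   // 0
    int build = id % 100;                // 99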
From 1f8b62fed81424576184dc9ef1ebe69f5156c904 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 29 Jan 2024 13:14:43 -0800
Subject: [PATCH 139/178] Bump peter-evans/create-issue-from-file from 4 to 5 (#12057)

* Bump peter-evans/create-issue-from-file from 4 to 5

Bumps [peter-evans/create-issue-from-file](https://github.com/peter-evans/create-issue-from-file) from 4 to 5.
- [Release notes](https://github.com/peter-evans/create-issue-from-file/releases)
- [Commits](https://github.com/peter-evans/create-issue-from-file/compare/v4...v5)

---
updated-dependencies:
- dependency-name: peter-evans/create-issue-from-file
  dependency-type: direct:production
  update-type: version-update:semver-major
...

Signed-off-by: dependabot[bot] <support@github.com>

* Update changelog

Signed-off-by: dependabot[bot] <support@github.com>

---------

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/create-documentation-issue.yml | 2 +-
 CHANGELOG.md | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/create-documentation-issue.yml b/.github/workflows/create-documentation-issue.yml
index df63847f8afca..b45e053cc25c2 100644
--- a/.github/workflows/create-documentation-issue.yml
+++ b/.github/workflows/create-documentation-issue.yml
@@ -29,7 +29,7 @@ jobs:

     - name: Create Issue From File
       id: create-issue
-      uses: peter-evans/create-issue-from-file@v4
+      uses: peter-evans/create-issue-from-file@v5
      with:
        title: Add documentation related to new feature
        content-filepath: ./ci/documentation/issue.md

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f2c0229e8c7fc..62497d021b6bd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -183,6 +183,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961))
 - Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042))
 - Bump `com.google.http-client:google-http-client-jackson2` from 1.43.3 to 1.44.1 ([#12059](https://github.com/opensearch-project/OpenSearch/pull/12059))
+- Bump `peter-evans/create-issue-from-file` from 4 to 5 ([#12057](https://github.com/opensearch-project/OpenSearch/pull/12057))

 ### Changed
 - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840))

From cc9ee9d572dc395f6de81fd4d393fe9f1962fa50 Mon Sep 17 00:00:00 2001
From: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>
Date: Mon, 29 Jan 2024 16:06:17 -0800
Subject: [PATCH 140/178] Updating Ip fields to use doc_values to search (#11508)

* Updating Ip fields to use doc_values to search

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fix IP tests

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fix skip to allow yaml test to pass on main

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Update tests to use existing test file

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Changing skip version to match bwc

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Using exact match instead of range

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Spotless

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fix IP field tests

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fix spotless + precommit failure

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Get point out of query and into value

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Fix term tests

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

* Add skip test logic to only doc_values test

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>

---------

Signed-off-by: Harsha Vamsi Kalluri <harshavamsi096@gmail.com>
---
 CHANGELOG.md | 1 +
 .../test/search/340_doc_values_field.yml | 136 +++++++++++---
 .../index/mapper/IpFieldMapper.java | 67 +++++--
 .../index/mapper/IpFieldTypeTests.java | 172 ++++++++++++++++--
 4 files changed, 323 insertions(+), 53 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 62497d021b6bd..d0feea5e2ded3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -216,6 +216,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Ensure Jackson default maximums introduced in 2.16.0 do not conflict with OpenSearch settings ([#11890](https://github.com/opensearch-project/OpenSearch/pull/11890))
 - Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877)), ([#12000](https://github.com/opensearch-project/OpenSearch/pull/12000))
 - Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968))
+- Updates IpField to be searchable when only `doc_values` are enabled ([#11508](https://github.com/opensearch-project/OpenSearch/pull/11508))

 ### Deprecated

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml
index c7b00d5fbbef2..d5ece1719dc48 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/340_doc_values_field.yml
@@ -1,8 +1,3 @@
-setup:
-  - skip:
-      features: [ "headers" ]
-      version: " - 2.11.99"
-      reason: "searching with only doc_values was added in 2.12.0"
 ---
 "search on fields with both index and doc_values enabled":
   - do:
@@ -47,6 +42,10 @@ setup:
               type: unsigned_long
               index: true
               doc_values: true
+            ip_field:
+              type: ip
+              index: true
+              doc_values: true

   - do:
       bulk:
@@ -54,11 +53,11 @@
       refresh: true
       body:
         - '{"index": {"_index": "test-iodvq", "_id": "1" }}'
-        - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }'
+        - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800, "ip_field": "192.168.0.1" }'
         - '{ "index": { "_index": "test-iodvq", "_id": "2" }}'
-        - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }'
+        - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801, "ip_field": "192.168.0.2" }'
         - '{ "index": { "_index": "test-iodvq", "_id": "3" } }'
-        - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }'
+        - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802, "ip_field": "192.168.0.3" }'

   - do:
       search:
@@ -162,7 +161,6 @@ setup:

   - match: { hits.total: 1 }

-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -174,6 +172,16 @@ setup:

   - match: { hits.total: 1 }

+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-iodvq
+        body:
+          query:
+            term:
+              ip_field: "192.168.0.1"
+
+  - match: {hits.total: 1}

   - do:
       search:
@@ -186,7 +194,6 @@ setup:

   - match: { hits.total: 2 }

-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -264,6 +271,17 @@ setup:

   - match: { hits.total: 2 }

+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-iodvq
+        body:
+          query:
+            terms:
+              ip_field: ["192.168.0.1", "192.168.0.2"]
+
+  - match: { hits.total: 2 }
+
   - do:
       search:
         rest_total_hits_as_int: true
@@ -384,6 +402,19 @@ setup:

   - match: { hits.total: 2 }

+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-iodvq
+        body:
+          query:
+            range:
+              ip_field:
+                gte: "192.168.0.1"
+                lte: "192.168.0.2"
+
+  - match: { hits.total: 2 }
+
 ---
 "search on fields with only index enabled":
   - do:
@@ -428,6 +459,10 @@ setup:
               type: unsigned_long
               index: true
               doc_values: false
+            ip_field:
+              type: ip
+              index: true
+              doc_values: false

   - do:
       bulk:
@@ -435,11 +470,11 @@
       refresh: true
       body:
         - '{"index": {"_index": "test-index", "_id": "1" }}'
-        - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }'
+        - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800, "ip_field": "192.168.0.1" }'
         - '{ "index": { "_index": "test-index", "_id": "2" }}'
-        - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }'
+        - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801, "ip_field": "192.168.0.2" }'
         - '{ "index": { "_index": "test-index", "_id": "3" } }'
-        - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }'
+        - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802, "ip_field": "192.168.0.3" }'

   - do:
       search:
@@ -465,7 +500,6 @@ setup:

   - match: { hits.total: 2 }

-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -555,6 +589,16 @@ setup:

   - match: { hits.total: 1 }

+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-index
+        body:
+          query:
+            term:
+              ip_field: "192.168.0.1"
+
+  - match: {hits.total: 1}

   - do:
       search:
@@ -567,7 +611,6 @@ setup:

   - match: { hits.total: 2 }

-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -645,6 +688,17 @@ setup:

   - match: { hits.total: 2 }

+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-index
+        body:
+          query:
+            terms:
+              ip_field: ["192.168.0.1", "192.168.0.2"]
+
+  - match: { hits.total: 2 }
+
   - do:
       search:
         rest_total_hits_as_int: true
@@ -765,8 +819,24 @@ setup:

   - match: { hits.total: 2 }

+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-index
+        body:
+          query:
+            range:
+              ip_field:
+                gte: "192.168.0.1"
+                lte: "192.168.0.2"
+
+  - match: { hits.total: 2 }
+
 ---
 "search on fields with only doc_values enabled":
+  - skip:
+      features: [ "headers" ]
+      version: " - 2.99.99"
+      reason: "searching with only doc_values was added in 3.0.0"
   - do:
       indices.create:
         index: test-doc-values
@@ -809,6 +879,10 @@ setup:
               type: unsigned_long
               index: false
               doc_values: true
+            ip_field:
+              type: ip
+              index: false
+              doc_values: true

   - do:
       bulk:
@@ -816,11 +890,11 @@
       refresh: true
       body:
         - '{"index": {"_index": "test-doc-values", "_id": "1" }}'
-        - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800 }'
+        - '{ "some_keyword": "ingesting some random keyword data", "byte": 120, "double": 100.0, "float": "800.0", "half_float": "400.0", "integer": 1290, "long": 13456, "short": 150, "unsigned_long": 10223372036854775800, "ip_field": "192.168.0.1" }'
         - '{ "index": { "_index": "test-doc-values", "_id": "2" }}'
-        - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801 }'
+        - '{ "some_keyword": "400", "byte": 121, "double": 101.0, "float": "801.0", "half_float": "401.0", "integer": 1291, "long": 13457, "short": 151, "unsigned_long": 10223372036854775801, "ip_field": "192.168.0.2" }'
         - '{ "index": { "_index": "test-doc-values", "_id": "3" } }'
-        - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802 }'
+        - '{ "some_keyword": "5", "byte": 122, "double": 102.0, "float": "802.0", "half_float": "402.0", "integer": 1292, "long": 13458, "short": 152, "unsigned_long": 10223372036854775802, "ip_field": "192.168.0.3" }'

   - do:
       search:
@@ -846,7 +920,6 @@ setup:

   - match: { hits.total: 2 }

-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -924,7 +997,6 @@ setup:

   - match: { hits.total: 1 }

-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -936,6 +1008,16 @@ setup:

   - match: { hits.total: 1 }

+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-doc-values
+        body:
+          query:
+            term:
+              ip_field: "192.168.0.3"
+
+  - match: { hits.total: 1 }

   - do:
       search:
@@ -948,7 +1030,6 @@ setup:

   - match: { hits.total: 2 }

-
   - do:
       search:
         rest_total_hits_as_int: true
@@ -1145,3 +1226,16 @@ setup:
         }

   - match: { hits.total: 2 }
+
+  - do:
+      search:
+        rest_total_hits_as_int: true
+        index: test-doc-values
+        body:
+          query:
+            range:
+              ip_field:
+                gte: "192.168.0.1"
+                lte: "192.168.0.2"
+
+  - match: { hits.total: 2 }
diff --git a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java
index 2a677d8bc1352..db8da8a949d6f 100644
--- a/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java
+++ b/server/src/main/java/org/opensearch/index/mapper/IpFieldMapper.java
@@ -36,7 +36,9 @@
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.SortedSetDocValues;
+import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.PointRangeQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.ArrayUtil;
 import org.apache.lucene.util.BytesRef;
@@ -222,25 +224,48 @@ protected Object parseSourceValue(Object value) {

         @Override
         public Query termQuery(Object value, @Nullable QueryShardContext context) {
-            failIfNotIndexed();
+            failIfNotIndexedAndNoDocValues();
+            Query query;
             if (value instanceof InetAddress) {
-                return InetAddressPoint.newExactQuery(name(), (InetAddress) value);
+                query = InetAddressPoint.newExactQuery(name(), (InetAddress) value);
             } else {
                 if (value instanceof BytesRef) {
                     value = ((BytesRef) value).utf8ToString();
                 }
+                String term = value.toString();
+                if (term.contains("/")) {
+                    final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(term);
+                    query = InetAddressPoint.newPrefixQuery(name(), cidr.v1(), cidr.v2());
+                } else {
+                    InetAddress address = InetAddresses.forString(term);
+                    query = InetAddressPoint.newExactQuery(name(), address);
+                }
+            }
+            if (isSearchable() && hasDocValues()) {
+                String term = value.toString();
+                if (term.contains("/")) {
+                    final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(term);
+                    return InetAddressPoint.newPrefixQuery(name(), cidr.v1(), cidr.v2());
+                }
+                return new IndexOrDocValuesQuery(
+                    query,
+                    SortedSetDocValuesField.newSlowExactQuery(name(), new BytesRef(((PointRangeQuery) query).getLowerPoint()))
+                );
+            }
+            if (hasDocValues()) {
                 String term = value.toString();
                 if (term.contains("/")) {
                     final Tuple<InetAddress, Integer> cidr = InetAddresses.parseCidr(term);
                     return InetAddressPoint.newPrefixQuery(name(), cidr.v1(), cidr.v2());
                 }
-                InetAddress address = InetAddresses.forString(term);
-                return InetAddressPoint.newExactQuery(name(), address);
+                return SortedSetDocValuesField.newSlowExactQuery(name(), new BytesRef(((PointRangeQuery) query).getLowerPoint()));
             }
+            return query;
         }

         @Override
         public Query termsQuery(List<?> values, QueryShardContext context) {
+            failIfNotIndexedAndNoDocValues();
             InetAddress[] addresses = new InetAddress[values.size()];
             int i = 0;
             for (Object value : values) {
@@ -265,14 +290,32 @@ public Query termsQuery(List<?> values, QueryShardContext context) {

         @Override
         public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) {
-            failIfNotIndexed();
-            return rangeQuery(
-                lowerTerm,
-                upperTerm,
-                includeLower,
-                includeUpper,
-                (lower, upper) -> InetAddressPoint.newRangeQuery(name(), lower, upper)
-            );
+            failIfNotIndexedAndNoDocValues();
+            return rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, (lower, upper) -> {
+                Query query = InetAddressPoint.newRangeQuery(name(), lower, upper);
+                if (isSearchable() && hasDocValues()) {
+                    return new IndexOrDocValuesQuery(
+                        query,
+                        SortedSetDocValuesField.newSlowRangeQuery(
+                            ((PointRangeQuery) query).getField(),
+                            new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                            new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                            true,
+                            true
+                        )
+                    );
+                }
+                if (hasDocValues()) {
+                    return SortedSetDocValuesField.newSlowRangeQuery(
+                        ((PointRangeQuery) query).getField(),
+                        new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                        new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                        true,
+                        true
+                    );
+                }
+                return query;
+            });
         }

     /**
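The heart of the mapper change above is Lucene's `IndexOrDocValuesQuery`: it wraps one query over the BKD points index and an equivalent "slow" query over sorted-set doc values, and Lucene picks the cheaper side per segment; with doc values alone, only the slow variant is returned. A self-contained sketch of the same pattern (field name and address are hypothetical, chosen for illustration):

    import java.net.InetAddress;

    import org.apache.lucene.document.InetAddressPoint;
    import org.apache.lucene.document.SortedSetDocValuesField;
    import org.apache.lucene.search.IndexOrDocValuesQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.util.BytesRef;
    import org.opensearch.common.network.InetAddresses;

    InetAddress address = InetAddresses.forString("192.168.0.1");

    // Exact match against the points index.
    Query onPoints = InetAddressPoint.newExactQuery("ip_field", address);

    // The same match against doc values; InetAddressPoint.encode produces
    // the 16-byte form shared by both structures.
    Query onDocValues = SortedSetDocValuesField.newSlowExactQuery(
        "ip_field",
        new BytesRef(InetAddressPoint.encode(address))
    );

    // Lucene chooses per segment: points for broad filtering, doc values
    // when the clause is selective relative to the rest of the query.
    Query query = new IndexOrDocValuesQuery(onPoints, onDocValues);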
diff --git a/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java
index 1a66037d98d71..0a2435553b19e 100644
--- a/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java
+++ b/server/src/test/java/org/opensearch/index/mapper/IpFieldTypeTests.java
@@ -32,10 +32,14 @@
 package org.opensearch.index.mapper;

 import org.apache.lucene.document.InetAddressPoint;
+import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
+import org.apache.lucene.search.IndexOrDocValuesQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
+import org.apache.lucene.search.PointRangeQuery;
+import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.opensearch.Version;
 import org.opensearch.cluster.metadata.IndexMetadata;
@@ -75,22 +79,41 @@
     public void testTermQuery() {
         MappedFieldType ft = new IpFieldMapper.IpFieldType("field");

         String ip = "2001:db8::2:1";
-        assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, null));
+
+        Query query = InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip));
+
+        assertEquals(
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowExactQuery("field", new BytesRef(((PointRangeQuery) query).getLowerPoint()))
+            ),
+            ft.termQuery(ip, null)
+        );

         ip = "192.168.1.7";
-        assertEquals(InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip)), ft.termQuery(ip, null));
+        query = InetAddressPoint.newExactQuery("field", InetAddresses.forString(ip));
+        assertEquals(
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowExactQuery("field", new BytesRef(((PointRangeQuery) query).getLowerPoint()))
+            ),
+            ft.termQuery(ip, null)
+        );

         ip = "2001:db8::2:1";
         String prefix = ip + "/64";
-        assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64), ft.termQuery(prefix, null));
+
+        query = InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 64);
+        assertEquals(query, ft.termQuery(prefix, null));

         ip = "192.168.1.7";
         prefix = ip + "/16";
-        assertEquals(InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16), ft.termQuery(prefix, null));
+        query = InetAddressPoint.newPrefixQuery("field", InetAddresses.forString(ip), 16);
+        assertEquals(query, ft.termQuery(prefix, null));

-        MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, null, Collections.emptyMap());
+        MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, false, null, Collections.emptyMap());
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> unsearchable.termQuery("::1", null));
-        assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
+        assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage());
     }

     public void testTermsQuery() {
@@ -118,44 +141,123 @@
     public void testRangeQuery() {
         MappedFieldType ft = new IpFieldMapper.IpFieldType("field");
-
+        Query query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddressPoint.MAX_VALUE);
         assertEquals(
-            InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddressPoint.MAX_VALUE),
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
+                    ((PointRangeQuery) query).getField(),
+                    new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                    new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                    true,
+                    true
+                )
+            ),
             ft.rangeQuery(null, null, randomBoolean(), randomBoolean(), null, null, null, null)
         );

+        query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.2.0"));
         assertEquals(
-            InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.2.0")),
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
+                    ((PointRangeQuery) query).getField(),
+                    new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                    new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                    true,
+                    true
+                )
+            ),
             ft.rangeQuery(null, "192.168.2.0", randomBoolean(), true, null, null, null, null)
         );

+        query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.1.255"));
         assertEquals(
-            InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("192.168.1.255")),
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
+                    ((PointRangeQuery) query).getField(),
+                    new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                    new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                    true,
+                    true
+                )
+            ),
             ft.rangeQuery(null, "192.168.2.0", randomBoolean(), false, null, null, null, null)
         );

+        query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddressPoint.MAX_VALUE);
         assertEquals(
-            InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddressPoint.MAX_VALUE),
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
+                    ((PointRangeQuery) query).getField(),
+                    new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                    new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                    true,
+                    true
+                )
+            ),
             ft.rangeQuery("2001:db8::", null, true, randomBoolean(), null, null, null, null)
         );

+        query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddressPoint.MAX_VALUE);
         assertEquals(
-            InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddressPoint.MAX_VALUE),
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
+                    ((PointRangeQuery) query).getField(),
+                    new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                    new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                    true,
+                    true
+                )
+            ),
             ft.rangeQuery("2001:db8::", null, false, randomBoolean(), null, null, null, null)
         );

+        query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddresses.forString("2001:db8::ffff"));
         assertEquals(
-            InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::"), InetAddresses.forString("2001:db8::ffff")),
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
+                    ((PointRangeQuery) query).getField(),
+                    new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                    new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                    true,
+                    true
+                )
+            ),
             ft.rangeQuery("2001:db8::", "2001:db8::ffff", true, true, null, null, null, null)
         );

+        query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddresses.forString("2001:db8::fffe"));
         assertEquals(
-            InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::1"), InetAddresses.forString("2001:db8::fffe")),
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
+                    ((PointRangeQuery) query).getField(),
+                    new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+                    new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+                    true,
+                    true
+                )
+            ),
             ft.rangeQuery("2001:db8::", "2001:db8::ffff", false, false, null, null, null, null)
         );

+        query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("2001:db8::2"), InetAddresses.forString("2001:db8::"));
         assertEquals(
+            new IndexOrDocValuesQuery(
+                query,
+                SortedSetDocValuesField.newSlowRangeQuery(
((PointRangeQuery) query).getField(),
+ new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+ new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+ true,
+ true
+ )
+ ),
// same lo/hi values but inclusive=false so this won't match anything
ft.rangeQuery("2001:db8::1", "2001:db8::1", false, false, null, null, null, null)
);
@@ -178,30 +280,60 @@ public void testRangeQuery() {
)
);

+ query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("::fffe:ffff:ffff"));
assertEquals(
- InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::"), InetAddresses.forString("::fffe:ffff:ffff")),
+ new IndexOrDocValuesQuery(
+ query,
+ SortedSetDocValuesField.newSlowRangeQuery(
+ ((PointRangeQuery) query).getField(),
+ new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+ new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+ true,
+ true
+ )
+ ),
// same lo/hi values but inclusive=false so this won't match anything
ft.rangeQuery("::", "0.0.0.0", true, false, null, null, null, null)
);

+ query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::1:0:0:0"), InetAddressPoint.MAX_VALUE);
assertEquals(
- InetAddressPoint.newRangeQuery("field", InetAddresses.forString("::1:0:0:0"), InetAddressPoint.MAX_VALUE),
+ new IndexOrDocValuesQuery(
+ query,
+ SortedSetDocValuesField.newSlowRangeQuery(
+ ((PointRangeQuery) query).getField(),
+ new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+ new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+ true,
+ true
+ )
+ ),
// same lo/hi values but inclusive=false so this won't match anything
ft.rangeQuery("255.255.255.255", "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", false, true, null, null, null, null)
);

+ query = InetAddressPoint.newRangeQuery("field", InetAddresses.forString("192.168.1.7"), InetAddresses.forString("2001:db8::"));
assertEquals(
// lower bound is ipv4, upper bound is ipv6
- InetAddressPoint.newRangeQuery("field", InetAddresses.forString("192.168.1.7"), InetAddresses.forString("2001:db8::")),
+ new IndexOrDocValuesQuery(
+ query,
+ SortedSetDocValuesField.newSlowRangeQuery(
+ ((PointRangeQuery) query).getField(),
+ new BytesRef(((PointRangeQuery) query).getLowerPoint()),
+ new BytesRef(((PointRangeQuery) query).getUpperPoint()),
+ true,
+ true
+ )
+ ),
ft.rangeQuery("::ffff:c0a8:107", "2001:db8::", true, true, null, null, null, null)
);

- MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, true, null, Collections.emptyMap());
+ MappedFieldType unsearchable = new IpFieldMapper.IpFieldType("field", false, false, false, null, Collections.emptyMap());
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> unsearchable.rangeQuery("::1", "2001::", true, true, null, null, null, null)
);
- assertEquals("Cannot search on field [field] since it is not indexed.", e.getMessage());
+ assertEquals("Cannot search on field [field] since it is both not indexed, and does not have doc_values enabled.", e.getMessage());
}

public void testFetchSourceValue() throws IOException {

From c55af6688caf399327e61363188bb9c7a631623b Mon Sep 17 00:00:00 2001
From: Varun Bansal <bansvaru@amazon.com>
Date: Tue, 30 Jan 2024 19:04:27 +0530
Subject: [PATCH 141/178] Ensure latest replication checkpoint post failover has correct operational primary term (#11990)

* Force update operation primary term in replication checkpoint post failover

Signed-off-by: bansvaru <bansvaru@amazon.com>
---
 .../remotestore/RemoteStoreRestoreIT.java | 53
+++++++++++++++++++ .../opensearch/index/shard/IndexShard.java | 5 +- .../shard/RemoteStoreRefreshListener.java | 11 ++++ .../RemoteStoreRefreshListenerTests.java | 48 +++++++++++++++++ 4 files changed, 115 insertions(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java index 72296a1c01a24..dfa5528eafcf2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java @@ -17,6 +17,11 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.unit.ByteSizeUnit; +import org.opensearch.core.index.Index; +import org.opensearch.index.IndexService; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.index.store.remote.metadata.RemoteSegmentMetadata; +import org.opensearch.indices.IndicesService; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.Repository; import org.opensearch.test.InternalTestCluster; @@ -135,6 +140,54 @@ private void testRestoreFlow(int numberOfIterations, boolean invokeFlush, boolea restoreAndVerify(shardCount, 0, indexStats); } + public void testMultipleWriters() throws Exception { + prepareCluster(1, 2, INDEX_NAME, 1, 1); + Map<String, Long> indexStats = indexData(randomIntBetween(2, 5), true, true, INDEX_NAME); + assertEquals(2, getNumShards(INDEX_NAME).totalNumShards); + + // ensure replica has latest checkpoint + flushAndRefresh(INDEX_NAME); + flushAndRefresh(INDEX_NAME); + + Index indexObj = clusterService().state().metadata().indices().get(INDEX_NAME).getIndex(); + IndicesService indicesService = internalCluster().getInstance(IndicesService.class, primaryNodeName(INDEX_NAME)); + IndexService indexService = indicesService.indexService(indexObj); + IndexShard indexShard = indexService.getShard(0); + RemoteSegmentMetadata remoteSegmentMetadataBeforeFailover = indexShard.getRemoteDirectory().readLatestMetadataFile(); + + // ensure all segments synced to replica + assertBusy( + () -> assertHitCount( + client(primaryNodeName(INDEX_NAME)).prepareSearch(INDEX_NAME).setSize(0).get(), + indexStats.get(TOTAL_OPERATIONS) + ), + 30, + TimeUnit.SECONDS + ); + assertBusy( + () -> assertHitCount( + client(replicaNodeName(INDEX_NAME)).prepareSearch(INDEX_NAME).setSize(0).get(), + indexStats.get(TOTAL_OPERATIONS) + ), + 30, + TimeUnit.SECONDS + ); + + String newPrimaryNodeName = replicaNodeName(INDEX_NAME); + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNodeName(INDEX_NAME))); + ensureYellow(INDEX_NAME); + + indicesService = internalCluster().getInstance(IndicesService.class, newPrimaryNodeName); + indexService = indicesService.indexService(indexObj); + indexShard = indexService.getShard(0); + IndexShard finalIndexShard = indexShard; + assertBusy(() -> assertTrue(finalIndexShard.isStartedPrimary() && finalIndexShard.isPrimaryMode())); + assertEquals( + finalIndexShard.getLatestSegmentInfosAndCheckpoint().v2().getPrimaryTerm(), + remoteSegmentMetadataBeforeFailover.getPrimaryTerm() + 1 + ); + } + /** * Helper function to test restoring an index having replicas from remote store when all the nodes housing the primary/replica drop. * @param numberOfIterations Number of times a refresh/flush should be invoked, followed by indexing some data. 
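The fix relies on a simple invariant: when a replica is promoted after failover, the shard's operation primary term is incremented, so any replication checkpoint computed before the failover carries a stale term. The IndexShard.java hunk below adds exactly that comparison before reusing a cached checkpoint. As a rough, self-contained Java sketch of the staleness check (the Checkpoint record, field names, and promoteToPrimary method here are illustrative stand-ins, not OpenSearch's actual ReplicationCheckpoint API):

    // Hypothetical, simplified model of the cached-checkpoint reuse logic.
    record Checkpoint(long primaryTerm, long segmentInfosVersion, long segmentsGen) {}

    final class CheckpointCache {
        private Checkpoint latest = new Checkpoint(1, 0, 0);
        private long operationPrimaryTerm = 1; // bumped when a replica is promoted to primary

        // Reuse the cached checkpoint only if version, generation AND primary term all match.
        // Without the primary term comparison, a pre-failover checkpoint could be reused
        // (and segments uploaded) under the old term after promotion.
        Checkpoint compute(long segmentInfosVersion, long segmentsGen) {
            if (latest.segmentInfosVersion() == segmentInfosVersion
                && latest.segmentsGen() == segmentsGen
                && latest.primaryTerm() == operationPrimaryTerm) {
                return latest;
            }
            latest = new Checkpoint(operationPrimaryTerm, segmentInfosVersion, segmentsGen);
            return latest;
        }

        void promoteToPrimary() {
            operationPrimaryTerm++; // failover: later checkpoints must carry the new term
        }
    }

This is also why the testMultipleWriters assertion above expects the post-failover checkpoint's primary term to be exactly remoteSegmentMetadataBeforeFailover.getPrimaryTerm() + 1.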
diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index cbb246219546b..5834eabfa9af0 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -1697,7 +1697,8 @@ ReplicationCheckpoint computeReplicationCheckpoint(SegmentInfos segmentInfos) th } final ReplicationCheckpoint latestReplicationCheckpoint = getLatestReplicationCheckpoint(); if (latestReplicationCheckpoint.getSegmentInfosVersion() == segmentInfos.getVersion() - && latestReplicationCheckpoint.getSegmentsGen() == segmentInfos.getGeneration()) { + && latestReplicationCheckpoint.getSegmentsGen() == segmentInfos.getGeneration() + && latestReplicationCheckpoint.getPrimaryTerm() == getOperationPrimaryTerm()) { return latestReplicationCheckpoint; } final Map<String, StoreFileMetadata> metadataMap = store.getSegmentMetadataMap(segmentInfos); @@ -2014,7 +2015,7 @@ public void close(String reason, boolean flushEngine, boolean deleted) throws IO /* ToDo : Fix this https://github.com/opensearch-project/OpenSearch/issues/8003 */ - private RemoteSegmentStoreDirectory getRemoteDirectory() { + public RemoteSegmentStoreDirectory getRemoteDirectory() { assert indexSettings.isRemoteStoreEnabled(); assert remoteStore.directory() instanceof FilterDirectory : "Store.directory is not an instance of FilterDirectory"; FilterDirectory remoteStoreDirectory = (FilterDirectory) remoteStore.directory(); diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java index d96a7e7c95ecf..e96a4bb19a960 100644 --- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java +++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java @@ -41,6 +41,7 @@ import java.util.Collection; import java.util.HashMap; import java.util.Iterator; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; @@ -209,6 +210,16 @@ private boolean syncSegments() { try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = indexShard.getSegmentInfosSnapshot()) { SegmentInfos segmentInfos = segmentInfosGatedCloseable.get(); final ReplicationCheckpoint checkpoint = indexShard.computeReplicationCheckpoint(segmentInfos); + if (checkpoint.getPrimaryTerm() != indexShard.getOperationPrimaryTerm()) { + throw new IllegalStateException( + String.format( + Locale.ROOT, + "primaryTerm mismatch during segments upload to remote store [%s] != [%s]", + checkpoint.getPrimaryTerm(), + indexShard.getOperationPrimaryTerm() + ) + ); + } // Capture replication checkpoint before uploading the segments as upload can take some time and checkpoint can // move. 
long lastRefreshedCheckpoint = ((InternalEngine) indexShard.getEngine()).lastRefreshedCheckpoint();
diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
index 555284e55a0fa..6567cb03f3dc6 100644
--- a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
@@ -425,6 +425,33 @@ public void testTrackerData() throws Exception {
assertBusy(() -> assertNoLag(tracker));
}

+ /**
+ * Tests that segment upload fails when the replication checkpoint and replication tracker primary terms mismatch
+ */
+ public void testRefreshFailedDueToPrimaryTermMisMatch() throws Exception {
+ int totalAttempt = 1;
+ int checkpointPublishSucceedOnAttempt = 0;
+ // We spy on IndexShard.isPrimaryStarted() to validate that we attempted the remote refresh the expected number of times.
+ CountDownLatch refreshCountLatch = new CountDownLatch(totalAttempt);
+
+ // The success latch should not count down, as we will fail the primary term validation.
+ CountDownLatch successLatch = new CountDownLatch(1);
+ CountDownLatch reachedCheckpointPublishLatch = new CountDownLatch(0);
+ Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> tuple = mockIndexShardWithRetryAndScheduleRefresh(
+ totalAttempt,
+ refreshCountLatch,
+ successLatch,
+ checkpointPublishSucceedOnAttempt,
+ reachedCheckpointPublishLatch,
+ false
+ );
+
+ assertBusy(() -> assertEquals(1, tuple.v2().getRemoteSegmentTransferTracker(indexShard.shardId()).getTotalUploadsFailed()));
+ assertBusy(() -> assertEquals(0, refreshCountLatch.getCount()));
+ assertBusy(() -> assertEquals(1, successLatch.getCount()));
+ assertBusy(() -> assertEquals(0, reachedCheckpointPublishLatch.getCount()));
+ }
+
private void assertNoLag(RemoteSegmentTransferTracker tracker) {
assertEquals(0, tracker.getRefreshSeqNoLag());
assertEquals(0, tracker.getBytesLag());
@@ -460,6 +487,24 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
CountDownLatch successLatch,
int succeedCheckpointPublishOnAttempt,
CountDownLatch reachedCheckpointPublishLatch
+ ) throws IOException {
+ return mockIndexShardWithRetryAndScheduleRefresh(
+ succeedOnAttempt,
+ refreshCountLatch,
+ successLatch,
+ succeedCheckpointPublishOnAttempt,
+ reachedCheckpointPublishLatch,
+ true
+ );
+ }
+
+ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIndexShardWithRetryAndScheduleRefresh(
+ int succeedOnAttempt,
+ CountDownLatch refreshCountLatch,
+ CountDownLatch successLatch,
+ int succeedCheckpointPublishOnAttempt,
+ CountDownLatch reachedCheckpointPublishLatch,
+ boolean mockPrimaryTerm
) throws IOException {
// Create index shard that we will be using to mock different methods in IndexShard for the unit test
indexShard = newStartedShard(
@@ -500,6 +545,9 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
when(remoteStore.directory()).thenReturn(remoteStoreFilterDirectory);

// Mock indexShard.getOperationPrimaryTerm()
+ if (mockPrimaryTerm) {
+ when(shard.getOperationPrimaryTerm()).thenReturn(indexShard.getOperationPrimaryTerm());
+ }
when(shard.getLatestReplicationCheckpoint()).thenReturn(indexShard.getLatestReplicationCheckpoint());

// Mock indexShard.routingEntry().primary()

From 0a16549b8d82b1e6a4a3feeccd598c3f86892130 Mon Sep 17 00:00:00 2001
From: Andriy Redko <andriy.redko@aiven.io>
Date: Tue, 30 Jan 2024 11:59:41 -0500 Subject: [PATCH 142/178] The telemetry-otel should not publish any client JARs (since it does not expose client facing APIs) (#12085) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- plugins/telemetry-otel/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/telemetry-otel/build.gradle b/plugins/telemetry-otel/build.gradle index 9be83e30c3183..735cbd92b691a 100644 --- a/plugins/telemetry-otel/build.gradle +++ b/plugins/telemetry-otel/build.gradle @@ -16,7 +16,7 @@ apply plugin: 'opensearch.internal-cluster-test' opensearchplugin { description 'Opentelemetry based telemetry implementation.' classname 'org.opensearch.telemetry.OTelTelemetryPlugin' - hasClientJar = true + hasClientJar = false } dependencies { From 64c00b33797f82e73e17dac519ba4b31008c3dd1 Mon Sep 17 00:00:00 2001 From: Suraj Singh <surajrider@gmail.com> Date: Tue, 30 Jan 2024 11:41:54 -0800 Subject: [PATCH 143/178] Flaky testRelocateWhileContinuouslyIndexingAndWaitingForRefresh test: Enable trace logs on replication and recovery (#12088) Signed-off-by: Suraj Singh <surajrider@gmail.com> --- .../indices/replication/SegmentReplicationRelocationIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java index dbe0b43441f54..a7be63bc61bc2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationRelocationIT.java @@ -219,6 +219,7 @@ public void testPrimaryRelocationWithSegRepFailure() throws Exception { * This test verifies primary recovery behavior with continuous ingestion * */ + @TestLogging(reason = "Enable trace logs from replication and recovery package", value = "org.opensearch.indices.recovery:TRACE,org.opensearch.indices.replication:TRACE") public void testRelocateWhileContinuouslyIndexingAndWaitingForRefresh() throws Exception { final String primary = internalCluster().startNode(); createIndex(1); From 8ed66aeed4c64fd015501f882ba1d5ad12f6e155 Mon Sep 17 00:00:00 2001 From: Andrew Ross <andrross@amazon.com> Date: Tue, 30 Jan 2024 11:43:23 -0800 Subject: [PATCH 144/178] Move Google Application Default Credentials CHANGELOG entry to 2.x (#12086) This entry will be released in 2.12 so it should not be in the 3.0 section. 
Signed-off-by: Andrew Ross <andrross@amazon.com> --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d0feea5e2ded3..ee933d1c820a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -59,7 +59,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add task completion count in search backpressure stats API ([#10028](https://github.com/opensearch-project/OpenSearch/pull/10028/)) - Deprecate CamelCase `PathHierarchy` tokenizer name in favor to lowercase `path_hierarchy` ([#10894](https://github.com/opensearch-project/OpenSearch/pull/10894)) - Switched to more reliable OpenSearch Lucene snapshot location([#11728](https://github.com/opensearch-project/OpenSearch/pull/11728)) -- Added support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394)) ### Deprecated @@ -135,6 +134,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563)) - Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([11876](https://github.com/opensearch-project/OpenSearch/pull/11876)) - New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465)) +- Add support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) From f9ab80176a2555f1117fd4ec387366590fe6e300 Mon Sep 17 00:00:00 2001 From: Suraj Singh <surajrider@gmail.com> Date: Tue, 30 Jan 2024 11:59:49 -0800 Subject: [PATCH 145/178] Enable cluster and replication package debug logging for testSingleIndexShardAllocation test (#11878) Signed-off-by: Suraj Singh <surajrider@gmail.com> --- .../indices/replication/SegmentReplicationAllocationIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java index f485d4e402b41..30edea6551067 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationAllocationIT.java @@ -22,6 +22,7 @@ import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.junit.annotations.TestLogging; import java.util.ArrayList; import java.util.List; @@ -128,6 +129,7 @@ public void testPerIndexPrimaryAllocation() throws Exception { * ensures the primary shard distribution is balanced. 
* */ + @TestLogging(reason = "Enable debug logs from cluster and index replication package", value = "org.opensearch.cluster:DEBUG,org.opensearch.indices.replication:DEBUG") public void testSingleIndexShardAllocation() throws Exception { internalCluster().startClusterManagerOnlyNode(); final int maxReplicaCount = 1; From fb2c5f21868ee364170363d881329b6e0afdfdb3 Mon Sep 17 00:00:00 2001 From: David Zane <38449481+dzane17@users.noreply.github.com> Date: Tue, 30 Jan 2024 12:01:34 -0800 Subject: [PATCH 146/178] [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart (#12035) Signed-off-by: David Zane <davizane@amazon.com> --- CHANGELOG.md | 1 + .../search/AbstractSearchAsyncAction.java | 14 ++++--- .../action/search/SearchPhaseName.java | 13 ++++++ .../action/search/TransportSearchAction.java | 2 +- .../AbstractSearchAsyncActionTests.java | 41 ++++++++++++++++--- 5 files changed, 60 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ee933d1c820a9..f9c2eda92ec73 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -85,6 +85,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Update supported version for max_shard_size parameter in Shrink API ([#11439](https://github.com/opensearch-project/OpenSearch/pull/11439)) - Fix typo in API annotation check message ([11836](https://github.com/opensearch-project/OpenSearch/pull/11836)) - Update supported version for must_exist parameter in update aliases API ([#11872](https://github.com/opensearch-project/OpenSearch/pull/11872)) +- [Bug] Check phase name before SearchRequestOperationsListener onPhaseStart ([#12035](https://github.com/opensearch-project/OpenSearch/pull/12035)) ### Security diff --git a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java index 5b41c2a13b596..3c27d3ce59e4c 100644 --- a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java @@ -432,16 +432,18 @@ public final void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPha } private void onPhaseEnd(SearchRequestContext searchRequestContext) { - if (getCurrentPhase() != null) { + if (getCurrentPhase() != null && SearchPhaseName.isValidName(getName())) { long tookInNanos = System.nanoTime() - getCurrentPhase().getStartTimeInNanos(); searchRequestContext.updatePhaseTookMap(getCurrentPhase().getName(), TimeUnit.NANOSECONDS.toMillis(tookInNanos)); + this.searchRequestContext.getSearchRequestOperationsListener().onPhaseEnd(this, searchRequestContext); } - this.searchRequestContext.getSearchRequestOperationsListener().onPhaseEnd(this, searchRequestContext); } - private void onPhaseStart(SearchPhase phase) { + void onPhaseStart(SearchPhase phase) { setCurrentPhase(phase); - this.searchRequestContext.getSearchRequestOperationsListener().onPhaseStart(this); + if (SearchPhaseName.isValidName(phase.getName())) { + this.searchRequestContext.getSearchRequestOperationsListener().onPhaseStart(this); + } } private void onRequestEnd(SearchRequestContext searchRequestContext) { @@ -714,7 +716,9 @@ public void sendSearchResponse(InternalSearchResponse internalSearchResponse, At @Override public final void onPhaseFailure(SearchPhase phase, String msg, Throwable cause) { - this.searchRequestContext.getSearchRequestOperationsListener().onPhaseFailure(this); + if 
(SearchPhaseName.isValidName(phase.getName())) { + this.searchRequestContext.getSearchRequestOperationsListener().onPhaseFailure(this); + } raisePhaseFailure(new SearchPhaseExecutionException(phase.getName(), msg, cause, buildShardFailures())); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java b/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java index 8cf92934c8a52..c6f3d4c70632d 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java +++ b/server/src/main/java/org/opensearch/action/search/SearchPhaseName.java @@ -10,6 +10,9 @@ import org.opensearch.common.annotation.PublicApi; +import java.util.HashSet; +import java.util.Set; + /** * Enum for different Search Phases in OpenSearch * @@ -25,6 +28,12 @@ public enum SearchPhaseName { CAN_MATCH("can_match"); private final String name; + private static final Set<String> PHASE_NAMES = new HashSet<>(); + static { + for (SearchPhaseName phaseName : SearchPhaseName.values()) { + PHASE_NAMES.add(phaseName.name); + } + } SearchPhaseName(final String name) { this.name = name; @@ -33,4 +42,8 @@ public enum SearchPhaseName { public String getName() { return name; } + + public static boolean isValidName(String phaseName) { + return PHASE_NAMES.contains(phaseName); + } } diff --git a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java index 842c10b700d24..79e599ec9387b 100644 --- a/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/opensearch/action/search/TransportSearchAction.java @@ -1238,7 +1238,7 @@ private AbstractSearchAsyncAction<? extends SearchPhaseResult> searchAsyncAction clusters, searchRequestContext ); - return new SearchPhase(action.getName()) { + return new SearchPhase("none") { @Override public void run() { action.start(); diff --git a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java index 76129341fc9a2..a7cbbffc51ed4 100644 --- a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java @@ -338,7 +338,14 @@ public void testOnPhaseFailureAndVerifyListeners() { SearchQueryThenFetchAsyncAction action = createSearchQueryThenFetchAsyncAction(requestOperationListeners); action.start(); assertEquals(1, testListener.getPhaseCurrent(action.getSearchPhaseName())); - action.onPhaseFailure(new SearchPhase("test") { + action.onPhaseFailure(new SearchPhase("none") { + @Override + public void run() { + + } + }, "message", null); + assertEquals(1, testListener.getPhaseCurrent(action.getSearchPhaseName())); + action.onPhaseFailure(new SearchPhase(action.getName()) { @Override public void run() { @@ -352,14 +359,14 @@ public void run() { ); searchDfsQueryThenFetchAsyncAction.start(); assertEquals(1, testListener.getPhaseCurrent(searchDfsQueryThenFetchAsyncAction.getSearchPhaseName())); - searchDfsQueryThenFetchAsyncAction.onPhaseFailure(new SearchPhase("test") { + searchDfsQueryThenFetchAsyncAction.onPhaseFailure(new SearchPhase(searchDfsQueryThenFetchAsyncAction.getName()) { @Override public void run() { } }, "message", null); - assertEquals(0, testListener.getPhaseCurrent(action.getSearchPhaseName())); - assertEquals(0, 
testListener.getPhaseTotal(action.getSearchPhaseName()));
+ assertEquals(0, testListener.getPhaseCurrent(searchDfsQueryThenFetchAsyncAction.getSearchPhaseName()));
+ assertEquals(0, testListener.getPhaseTotal(searchDfsQueryThenFetchAsyncAction.getSearchPhaseName()));

FetchSearchPhase fetchPhase = createFetchSearchPhase();
ShardId shardId = new ShardId(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLength(10), randomInt());
@@ -368,7 +375,7 @@ public void run() {
action.skipShard(searchShardIterator);
action.executeNextPhase(action, fetchPhase);
assertEquals(1, testListener.getPhaseCurrent(fetchPhase.getSearchPhaseName()));
- action.onPhaseFailure(new SearchPhase("test") {
+ action.onPhaseFailure(new SearchPhase(fetchPhase.getName()) {
@Override
public void run() {

@@ -403,6 +410,30 @@ public void run() {
assertEquals(requestIds, releasedContexts);
}

+ public void testOnPhaseStart() {
+ ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+ SearchRequestStats testListener = new SearchRequestStats(clusterSettings);
+
+ final List<SearchRequestOperationsListener> requestOperationListeners = new ArrayList<>(List.of(testListener));
+ SearchQueryThenFetchAsyncAction action = createSearchQueryThenFetchAsyncAction(requestOperationListeners);
+
+ action.onPhaseStart(new SearchPhase("test") {
+ @Override
+ public void run() {}
+ });
+ action.onPhaseStart(new SearchPhase("none") {
+ @Override
+ public void run() {}
+ });
+ assertEquals(0, testListener.getPhaseCurrent(action.getSearchPhaseName()));
+
+ action.onPhaseStart(new SearchPhase(action.getName()) {
+ @Override
+ public void run() {}
+ });
+ assertEquals(1, testListener.getPhaseCurrent(action.getSearchPhaseName()));
+ }
+
public void testShardNotAvailableWithDisallowPartialFailures() {
SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false);
AtomicReference<Exception> exception = new AtomicReference<>();

From 11644d5bbd6fe53c2b2b46214688fe05d78de433 Mon Sep 17 00:00:00 2001
From: Marc Handalian <handalm@amazon.com>
Date: Tue, 30 Jan 2024 13:52:42 -0800
Subject: [PATCH 147/178] Remove compounding retries within PrimaryShardReplicationSource (#12043)

This change removes retries within PrimaryShardReplicationSource and relies on retries in one place, at the start of replication. This is done within SegmentReplicationTargetService's processLatestReceivedCheckpoint after a failure or success occurs. The timeout on these retries is the cause of flaky failures in SegmentReplication's bwc test within IndexingIT that can occur on node disconnect: the retries persist for over ~1m against the same primary node that has been relocated or shut down, causing the test to time out.

This change also includes simplifications to the cancellation flow on the target service before the shard is closed. Previously we would "request" a cancel, which did not remove the target from the ongoing replications collection until a cancellation failure was thrown. The transport calls from PrimaryShardReplicationSource are no longer wrapped in CancellableThreads by the client, so a call to "cancel" will not throw. Instead we now immediately remove the target and decref/close it.
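To make the new cancellation contract concrete: with the transport calls no longer cancellable, "cancel" can only mean removing the target from the ongoing-replications collection and releasing its reference immediately. Below is a minimal, self-contained Java sketch of that flow; the class and method names (OngoingReplications, Target, cancelForShard) are illustrative rather than OpenSearch's actual API.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicInteger;

    // Hypothetical model: cancellation removes and releases the target right away,
    // instead of "requesting" a cancel and waiting for an in-flight call to throw.
    final class OngoingReplications {
        static final class Target {
            final AtomicInteger refCount = new AtomicInteger(1); // the collection holds one reference

            void decRef() {
                if (refCount.decrementAndGet() == 0) {
                    close();
                }
            }

            void close() { /* release store references, notify the failure listener */ }
        }

        private final Map<String, Target> ongoing = new ConcurrentHashMap<>();

        void start(String shardId, Target target) {
            ongoing.put(shardId, target);
        }

        void cancelForShard(String shardId, String reason) {
            Target target = ongoing.remove(shardId); // removed immediately, not on a later failure
            if (target != null) {
                target.decRef(); // drops to zero (and closes) unless another holder still has a ref
            }
        }
    }

Retries then live in one place: after a failure, processLatestReceivedCheckpoint re-checks the latest received checkpoint and starts a fresh replication, provided the shard is still open.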
Signed-off-by: Marc Handalian <marc.handalian@gmail.com> --- .../org/opensearch/upgrades/IndexingIT.java | 1 - .../replication/SegmentReplicationIT.java | 61 +++++++++++++++++++ ...plicationUsingRemoteStoreDisruptionIT.java | 53 +++++++--------- .../PrimaryShardReplicationSource.java | 46 ++++++-------- .../replication/SegmentReplicationTarget.java | 22 +++---- .../SegmentReplicationTargetService.java | 28 ++++++--- .../PrimaryShardReplicationSourceTests.java | 38 ------------ .../SegmentReplicationTargetServiceTests.java | 7 ++- 8 files changed, 133 insertions(+), 123 deletions(-) diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index f963f8d221bb5..1577260e145d4 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -262,7 +262,6 @@ public void testIndexing() throws Exception { * @throws Exception if index creation fail * @throws UnsupportedOperationException if cluster type is unknown */ - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/7679") public void testIndexingWithSegRep() throws Exception { if (UPGRADE_FROM_VERSION.before(Version.V_2_4_0)) { logger.info("--> Skip test for version {} where segment replication feature is not available", UPGRADE_FROM_VERSION); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java index 5511bc7945d65..4a848e92800cb 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationIT.java @@ -594,6 +594,67 @@ public void testCancellation() throws Exception { assertDocCounts(docCount, primaryNode); } + public void testCancellationDuringGetCheckpointInfo() throws Exception { + cancelDuringReplicaAction(SegmentReplicationSourceService.Actions.GET_CHECKPOINT_INFO); + } + + public void testCancellationDuringGetSegments() throws Exception { + cancelDuringReplicaAction(SegmentReplicationSourceService.Actions.GET_SEGMENT_FILES); + } + + private void cancelDuringReplicaAction(String actionToblock) throws Exception { + // this test stubs transport calls specific to node-node replication. 
+ assumeFalse( + "Skipping the test as its not compatible with segment replication with remote store.", + segmentReplicationWithRemoteEnabled() + ); + final String primaryNode = internalCluster().startDataOnlyNode(); + createIndex(INDEX_NAME, Settings.builder().put(indexSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build()); + ensureYellow(INDEX_NAME); + + final String replicaNode = internalCluster().startDataOnlyNode(); + ensureGreen(INDEX_NAME); + final SegmentReplicationTargetService targetService = internalCluster().getInstance( + SegmentReplicationTargetService.class, + replicaNode + ); + final IndexShard replicaShard = getIndexShard(replicaNode, INDEX_NAME); + CountDownLatch startCancellationLatch = new CountDownLatch(1); + CountDownLatch latch = new CountDownLatch(1); + + MockTransportService primaryTransportService = (MockTransportService) internalCluster().getInstance( + TransportService.class, + primaryNode + ); + primaryTransportService.addRequestHandlingBehavior(actionToblock, (handler, request, channel, task) -> { + logger.info("action {}", actionToblock); + try { + startCancellationLatch.countDown(); + latch.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + }); + + // index a doc and trigger replication + client().prepareIndex(INDEX_NAME).setId("1").setSource("foo", "bar").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); + + // remove the replica and ensure it is cleaned up. + startCancellationLatch.await(); + SegmentReplicationTarget target = targetService.get(replicaShard.shardId()); + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings(INDEX_NAME) + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + ); + assertEquals("Replication not closed: " + target.getId(), 0, target.refCount()); + assertEquals("Store has a positive refCount", 0, replicaShard.store().refCount()); + // stop the replica, this will do additional checks on shutDown to ensure the replica and its store are closed properly + internalCluster().stopRandomNode(InternalTestCluster.nameFilter(replicaNode)); + latch.countDown(); + } + public void testStartReplicaAfterPrimaryIndexesDocs() throws Exception { final String primaryNode = internalCluster().startDataOnlyNode(); createIndex(INDEX_NAME, Settings.builder().put(indexSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build()); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java index 8372135fc55c4..3d8d001b17ddf 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/SegmentReplicationUsingRemoteStoreDisruptionIT.java @@ -23,8 +23,6 @@ import org.opensearch.indices.replication.SegmentReplicationState; import org.opensearch.indices.replication.SegmentReplicationTarget; import org.opensearch.indices.replication.SegmentReplicationTargetService; -import org.opensearch.indices.replication.common.ReplicationCollection; -import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.disruption.SlowClusterStateProcessing; @@ -33,6 +31,8 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static 
org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + /** * This class runs tests with remote store + segRep while blocking file downloads */ @@ -59,22 +59,18 @@ public void testCancelReplicationWhileSyncingSegments() throws Exception { indexSingleDoc(); refresh(INDEX_NAME); waitForBlock(replicaNode, REPOSITORY_NAME, TimeValue.timeValueSeconds(10)); - final SegmentReplicationState state = targetService.getOngoingEventSegmentReplicationState(indexShard.shardId()); - assertEquals(SegmentReplicationState.Stage.GET_FILES, state.getStage()); - ReplicationCollection.ReplicationRef<SegmentReplicationTarget> segmentReplicationTargetReplicationRef = targetService.get( - state.getReplicationId() - ); - final SegmentReplicationTarget segmentReplicationTarget = segmentReplicationTargetReplicationRef.get(); - // close the target ref here otherwise it will hold a refcount - segmentReplicationTargetReplicationRef.close(); + SegmentReplicationTarget segmentReplicationTarget = targetService.get(indexShard.shardId()); assertNotNull(segmentReplicationTarget); + assertEquals(SegmentReplicationState.Stage.GET_FILES, segmentReplicationTarget.state().getStage()); assertTrue(segmentReplicationTarget.refCount() > 0); - internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNode)); - assertBusy(() -> { - assertTrue(indexShard.routingEntry().primary()); - assertNull(targetService.getOngoingEventSegmentReplicationState(indexShard.shardId())); - assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); - }); + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings(INDEX_NAME) + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + ); + assertNull(targetService.getOngoingEventSegmentReplicationState(indexShard.shardId())); + assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); unblockNode(REPOSITORY_NAME, replicaNode); cleanupRepo(); } @@ -85,7 +81,6 @@ public void testCancelReplicationWhileFetchingMetadata() throws Exception { final Set<String> dataNodeNames = internalCluster().getDataNodeNames(); final String replicaNode = getNode(dataNodeNames, false); - final String primaryNode = getNode(dataNodeNames, true); SegmentReplicationTargetService targetService = internalCluster().getInstance(SegmentReplicationTargetService.class, replicaNode); ensureGreen(INDEX_NAME); @@ -94,22 +89,18 @@ public void testCancelReplicationWhileFetchingMetadata() throws Exception { indexSingleDoc(); refresh(INDEX_NAME); waitForBlock(replicaNode, REPOSITORY_NAME, TimeValue.timeValueSeconds(10)); - final SegmentReplicationState state = targetService.getOngoingEventSegmentReplicationState(indexShard.shardId()); - assertEquals(SegmentReplicationState.Stage.GET_CHECKPOINT_INFO, state.getStage()); - ReplicationCollection.ReplicationRef<SegmentReplicationTarget> segmentReplicationTargetReplicationRef = targetService.get( - state.getReplicationId() - ); - final SegmentReplicationTarget segmentReplicationTarget = segmentReplicationTargetReplicationRef.get(); - // close the target ref here otherwise it will hold a refcount - segmentReplicationTargetReplicationRef.close(); + SegmentReplicationTarget segmentReplicationTarget = targetService.get(indexShard.shardId()); assertNotNull(segmentReplicationTarget); + assertEquals(SegmentReplicationState.Stage.GET_CHECKPOINT_INFO, segmentReplicationTarget.state().getStage()); assertTrue(segmentReplicationTarget.refCount() > 0); - 
internalCluster().stopRandomNode(InternalTestCluster.nameFilter(primaryNode)); - assertBusy(() -> { - assertTrue(indexShard.routingEntry().primary()); - assertNull(targetService.getOngoingEventSegmentReplicationState(indexShard.shardId())); - assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); - }); + assertAcked( + client().admin() + .indices() + .prepareUpdateSettings(INDEX_NAME) + .setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)) + ); + assertNull(targetService.get(indexShard.shardId())); + assertEquals("Target should be closed", 0, segmentReplicationTarget.refCount()); unblockNode(REPOSITORY_NAME, replicaNode); cleanupRepo(); } diff --git a/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java b/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java index 02fc8feefd698..a17779810239a 100644 --- a/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java +++ b/server/src/main/java/org/opensearch/indices/replication/PrimaryShardReplicationSource.java @@ -8,16 +8,14 @@ package org.opensearch.indices.replication; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; +import org.opensearch.action.ActionListenerResponseHandler; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.core.action.ActionListener; -import org.opensearch.core.common.io.stream.Writeable; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.store.StoreFileMetadata; import org.opensearch.indices.recovery.RecoverySettings; -import org.opensearch.indices.recovery.RetryableTransportClient; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; +import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportRequestOptions; import org.opensearch.transport.TransportService; @@ -35,9 +33,7 @@ */ public class PrimaryShardReplicationSource implements SegmentReplicationSource { - private static final Logger logger = LogManager.getLogger(PrimaryShardReplicationSource.class); - - private final RetryableTransportClient transportClient; + private final TransportService transportService; private final DiscoveryNode sourceNode; private final DiscoveryNode targetNode; @@ -52,12 +48,7 @@ public PrimaryShardReplicationSource( DiscoveryNode sourceNode ) { this.targetAllocationId = targetAllocationId; - this.transportClient = new RetryableTransportClient( - transportService, - sourceNode, - recoverySettings.internalActionRetryTimeout(), - logger - ); + this.transportService = transportService; this.sourceNode = sourceNode; this.targetNode = targetNode; this.recoverySettings = recoverySettings; @@ -69,10 +60,14 @@ public void getCheckpointMetadata( ReplicationCheckpoint checkpoint, ActionListener<CheckpointInfoResponse> listener ) { - final Writeable.Reader<CheckpointInfoResponse> reader = CheckpointInfoResponse::new; - final ActionListener<CheckpointInfoResponse> responseListener = ActionListener.map(listener, r -> r); final CheckpointInfoRequest request = new CheckpointInfoRequest(replicationId, targetAllocationId, targetNode, checkpoint); - transportClient.executeRetryableAction(GET_CHECKPOINT_INFO, request, responseListener, reader); + transportService.sendRequest( + sourceNode, + GET_CHECKPOINT_INFO, + request, + TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionRetryTimeout()).build(), + new 
ActionListenerResponseHandler<>(listener, CheckpointInfoResponse::new, ThreadPool.Names.GENERIC) + ); } @Override @@ -88,8 +83,6 @@ public void getSegmentFiles( // MultiFileWriter takes care of progress tracking for downloads in this scenario // TODO: Move state management and tracking into replication methods and use chunking and data // copy mechanisms only from MultiFileWriter - final Writeable.Reader<GetSegmentFilesResponse> reader = GetSegmentFilesResponse::new; - final ActionListener<GetSegmentFilesResponse> responseListener = ActionListener.map(listener, r -> r); final GetSegmentFilesRequest request = new GetSegmentFilesRequest( replicationId, targetAllocationId, @@ -97,20 +90,17 @@ public void getSegmentFiles( filesToFetch, checkpoint ); - final TransportRequestOptions options = TransportRequestOptions.builder() - .withTimeout(recoverySettings.internalActionLongTimeout()) - .build(); - transportClient.executeRetryableAction(GET_SEGMENT_FILES, request, options, responseListener, reader); + transportService.sendRequest( + sourceNode, + GET_SEGMENT_FILES, + request, + TransportRequestOptions.builder().withTimeout(recoverySettings.internalActionLongTimeout()).build(), + new ActionListenerResponseHandler<>(listener, GetSegmentFilesResponse::new, ThreadPool.Names.GENERIC) + ); } @Override public String getDescription() { return sourceNode.getName(); } - - @Override - public void cancel() { - transportClient.cancel(); - } - } diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java index cc71ef816e525..af764556b7549 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTarget.java @@ -83,6 +83,16 @@ protected void closeInternal() { } } + @Override + protected void onCancel(String reason) { + try { + notifyListener(new ReplicationFailedException(reason), false); + } finally { + source.cancel(); + cancellableThreads.cancel(reason); + } + } + @Override protected String getPrefix() { return REPLICATION_PREFIX + UUIDs.randomBase64UUID() + "."; @@ -320,16 +330,4 @@ private void finalizeReplication(CheckpointInfoResponse checkpointInfoResponse) } } } - - /** - * Trigger a cancellation, this method will not close the target a subsequent call to #fail is required from target service. 
- */ - @Override - public void cancel(String reason) { - if (finished.get() == false) { - logger.trace(new ParameterizedMessage("Cancelling replication for target {}", description())); - cancellableThreads.cancel(reason); - source.cancel(); - } - } } diff --git a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java index d6db154a4e0e3..f28f829545d59 100644 --- a/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java +++ b/server/src/main/java/org/opensearch/indices/replication/SegmentReplicationTargetService.java @@ -84,10 +84,6 @@ public class SegmentReplicationTargetService extends AbstractLifecycleComponent private final ClusterService clusterService; private final TransportService transportService; - public ReplicationRef<SegmentReplicationTarget> get(long replicationId) { - return onGoingReplications.get(replicationId); - } - /** * The internal actions * @@ -158,6 +154,7 @@ protected void doStart() { @Override protected void doStop() { if (DiscoveryNode.isDataNode(clusterService.getSettings())) { + assert onGoingReplications.size() == 0 : "Replication collection should be empty on shutdown"; clusterService.removeListener(this); } } @@ -201,7 +198,7 @@ public void clusterChanged(ClusterChangedEvent event) { @Override public void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { if (indexShard != null && indexShard.indexSettings().isSegRepEnabled()) { - onGoingReplications.requestCancel(indexShard.shardId(), "Shard closing"); + onGoingReplications.cancelForShard(indexShard.shardId(), "Shard closing"); latestReceivedCheckpoint.remove(shardId); } } @@ -223,7 +220,7 @@ public void afterIndexShardStarted(IndexShard indexShard) { @Override public void shardRoutingChanged(IndexShard indexShard, @Nullable ShardRouting oldRouting, ShardRouting newRouting) { if (oldRouting != null && indexShard.indexSettings().isSegRepEnabled() && oldRouting.primary() == false && newRouting.primary()) { - onGoingReplications.requestCancel(indexShard.shardId(), "Shard has been promoted to primary"); + onGoingReplications.cancelForShard(indexShard.shardId(), "Shard has been promoted to primary"); latestReceivedCheckpoint.remove(indexShard.shardId()); } } @@ -255,6 +252,14 @@ public SegmentReplicationState getSegmentReplicationState(ShardId shardId) { .orElseGet(() -> getlatestCompletedEventSegmentReplicationState(shardId)); } + public ReplicationRef<SegmentReplicationTarget> get(long replicationId) { + return onGoingReplications.get(replicationId); + } + + public SegmentReplicationTarget get(ShardId shardId) { + return onGoingReplications.getOngoingReplicationTarget(shardId); + } + /** * Invoked when a new checkpoint is received from a primary shard. * It checks if a new checkpoint should be processed or not and starts replication if needed. @@ -454,7 +459,13 @@ protected boolean processLatestReceivedCheckpoint(IndexShard replicaShard, Threa latestPublishedCheckpoint ) ); - Runnable runnable = () -> onNewCheckpoint(latestReceivedCheckpoint.get(replicaShard.shardId()), replicaShard); + Runnable runnable = () -> { + // if we retry ensure the shard is not in the process of being closed. + // it will be removed from indexService's collection before the shard is actually marked as closed. 
+ if (indicesService.getShardOrNull(replicaShard.shardId()) != null) { + onNewCheckpoint(latestReceivedCheckpoint.get(replicaShard.shardId()), replicaShard); + } + }; // Checks if we are using same thread and forks if necessary. if (thread == Thread.currentThread()) { threadPool.generic().execute(runnable); @@ -548,9 +559,6 @@ public ReplicationRunner(long replicationId) { @Override public void onFailure(Exception e) { - try (final ReplicationRef<SegmentReplicationTarget> ref = onGoingReplications.get(replicationId)) { - logger.error(() -> new ParameterizedMessage("Error during segment replication, {}", ref.get().description()), e); - } onGoingReplications.fail(replicationId, new ReplicationFailedException("Unexpected Error during replication", e), false); } diff --git a/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java b/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java index e4dd32e5c6f70..2531790ede4af 100644 --- a/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/PrimaryShardReplicationSourceTests.java @@ -15,7 +15,6 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.CancellableThreads; import org.opensearch.common.util.io.IOUtils; import org.opensearch.core.action.ActionListener; import org.opensearch.index.shard.IndexShard; @@ -27,12 +26,9 @@ import org.opensearch.test.ClusterServiceUtils; import org.opensearch.test.transport.CapturingTransport; import org.opensearch.transport.TransportService; -import org.junit.Assert; import java.util.Arrays; import java.util.Collections; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; import static org.mockito.Mockito.mock; @@ -165,40 +161,6 @@ public void testTransportTimeoutForGetSegmentFilesAction() { assertEquals(recoverySettings.internalActionLongTimeout(), capturedRequest.options.timeout()); } - public void testGetSegmentFiles_CancelWhileRequestOpen() throws InterruptedException { - CountDownLatch latch = new CountDownLatch(1); - final ReplicationCheckpoint checkpoint = new ReplicationCheckpoint( - indexShard.shardId(), - PRIMARY_TERM, - SEGMENTS_GEN, - VERSION, - Codec.getDefault().getName() - ); - StoreFileMetadata testMetadata = new StoreFileMetadata("testFile", 1L, "checksum", Version.LATEST); - replicationSource.getSegmentFiles( - REPLICATION_ID, - checkpoint, - Arrays.asList(testMetadata), - mock(IndexShard.class), - (fileName, bytesRecovered) -> {}, - new ActionListener<>() { - @Override - public void onResponse(GetSegmentFilesResponse getSegmentFilesResponse) { - Assert.fail("onFailure response expected."); - } - - @Override - public void onFailure(Exception e) { - assertEquals(e.getClass(), CancellableThreads.ExecutionCancelledException.class); - latch.countDown(); - } - } - ); - replicationSource.cancel(); - latch.await(2, TimeUnit.SECONDS); - assertEquals("listener should have resolved in a failure", 0, latch.getCount()); - } - private DiscoveryNode newDiscoveryNode(String nodeName) { return new DiscoveryNode( nodeName, diff --git a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java index 
f284a425a417b..3c72dda2d8b5d 100644 --- a/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java +++ b/server/src/test/java/org/opensearch/indices/replication/SegmentReplicationTargetServiceTests.java @@ -620,6 +620,7 @@ public void testForceSegmentSyncHandlerWithFailure_AlreadyClosedException_swallo } public void testTargetCancelledBeforeStartInvoked() { + final String cancelReason = "test"; final SegmentReplicationTarget target = new SegmentReplicationTarget( replicaShard, primaryShard.getLatestReplicationCheckpoint(), @@ -633,12 +634,12 @@ public void onReplicationDone(SegmentReplicationState state) { @Override public void onReplicationFailure(SegmentReplicationState state, ReplicationFailedException e, boolean sendShardFailure) { // failures leave state object in last entered stage. - assertEquals(SegmentReplicationState.Stage.GET_CHECKPOINT_INFO, state.getStage()); - assertTrue(e.getCause() instanceof CancellableThreads.ExecutionCancelledException); + assertEquals(SegmentReplicationState.Stage.INIT, state.getStage()); + assertEquals(cancelReason, e.getMessage()); } } ); - target.cancel("test"); + target.cancel(cancelReason); sut.startReplication(target); } From 7526f8608adf5aa468b75db7ea99d3ab72a468fa Mon Sep 17 00:00:00 2001 From: Thomas Seidl <drunken-monkey@users.noreply.github.com> Date: Tue, 30 Jan 2024 23:37:09 +0100 Subject: [PATCH 148/178] Fixed typo in DEVELOPER_GUIDE.md. (#12051) Signed-off-by: Thomas Seidl <drunken-monkey@users.noreply.github.com> --- DEVELOPER_GUIDE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 21adbb0305ab1..f0851fc58d444 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -348,7 +348,7 @@ Please follow these formatting guidelines: * Wildcard imports (`import foo.bar.baz.*`) are forbidden and will cause the build to fail. * If *absolutely* necessary, you can disable formatting for regions of code with the `// tag::NAME` and `// end::NAME` directives, but note that these are intended for use in documentation, so please make it clear what you have done, and only do this where the benefit clearly outweighs the decrease in consistency. * Note that JavaDoc and block comments i.e. `/* ... */` are not formatted, but line comments i.e `// ...` are. -* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, if might get called out in PR reviews as something to change. +* There is an implicit rule that negative boolean expressions should use the form `foo == false` instead of `!foo` for better readability of the code. While this isn't strictly enforced, it might get called out in PR reviews as something to change. 
## Adding Dependencies From 2f8d267adbdfef175004912e877bdbd0814f27f1 Mon Sep 17 00:00:00 2001 From: Poojita Raj <poojiraj@amazon.com> Date: Tue, 30 Jan 2024 16:15:41 -0800 Subject: [PATCH 149/178] Fix flaky test SegmentReplicationStatsIT.testMultipleIndices (#12070) Signed-off-by: Poojita Raj <poojiraj@amazon.com> --- .../SegmentReplicationStatsIT.java | 61 ++++++++++--------- 1 file changed, 32 insertions(+), 29 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java index 766471fdc0756..89aef6f0be1a6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationStatsIT.java @@ -268,12 +268,12 @@ public void testMultipleIndices() throws Exception { internalCluster().startClusterManagerOnlyNode(); final String index_2 = "tst-index-2"; List<String> nodes = new ArrayList<>(); - final String primaryNode = internalCluster().startNode(); + final String primaryNode = internalCluster().startDataOnlyNode(); nodes.add(primaryNode); createIndex(INDEX_NAME, index_2); ensureYellowAndNoInitializingShards(INDEX_NAME, index_2); - nodes.add(internalCluster().startNode()); + nodes.add(internalCluster().startDataOnlyNode()); ensureGreen(INDEX_NAME, index_2); final long numDocs = scaledRandomIntBetween(50, 100); @@ -284,6 +284,7 @@ public void testMultipleIndices() throws Exception { refresh(INDEX_NAME, index_2); waitForSearchableDocs(INDEX_NAME, numDocs, nodes); waitForSearchableDocs(index_2, numDocs, nodes); + ensureSearchable(INDEX_NAME, index_2); final IndexShard index_1_primary = getIndexShard(primaryNode, INDEX_NAME); final IndexShard index_2_primary = getIndexShard(primaryNode, index_2); @@ -291,37 +292,39 @@ public void testMultipleIndices() throws Exception { assertTrue(index_1_primary.routingEntry().primary()); assertTrue(index_2_primary.routingEntry().primary()); - // test both indices are returned in the response. - SegmentReplicationStatsResponse segmentReplicationStatsResponse = client().admin() - .indices() - .prepareSegmentReplicationStats() - .execute() - .actionGet(); + assertBusy(() -> { + // test both indices are returned in the response. 
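+            // a data node client keeps the stats request off the dedicated cluster-manager-only node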
+ SegmentReplicationStatsResponse segmentReplicationStatsResponse = dataNodeClient().admin() + .indices() + .prepareSegmentReplicationStats() + .execute() + .actionGet(); - Map<String, List<SegmentReplicationPerGroupStats>> replicationStats = segmentReplicationStatsResponse.getReplicationStats(); - assertEquals(2, replicationStats.size()); - List<SegmentReplicationPerGroupStats> replicationPerGroupStats = replicationStats.get(INDEX_NAME); - assertEquals(1, replicationPerGroupStats.size()); - SegmentReplicationPerGroupStats perGroupStats = replicationPerGroupStats.get(0); - assertEquals(perGroupStats.getShardId(), index_1_primary.shardId()); - Set<SegmentReplicationShardStats> replicaStats = perGroupStats.getReplicaStats(); - assertEquals(1, replicaStats.size()); - for (SegmentReplicationShardStats replica : replicaStats) { - assertNotNull(replica.getCurrentReplicationState()); - } + Map<String, List<SegmentReplicationPerGroupStats>> replicationStats = segmentReplicationStatsResponse.getReplicationStats(); + assertEquals(2, replicationStats.size()); + List<SegmentReplicationPerGroupStats> replicationPerGroupStats = replicationStats.get(INDEX_NAME); + assertEquals(1, replicationPerGroupStats.size()); + SegmentReplicationPerGroupStats perGroupStats = replicationPerGroupStats.get(0); + assertEquals(perGroupStats.getShardId(), index_1_primary.shardId()); + Set<SegmentReplicationShardStats> replicaStats = perGroupStats.getReplicaStats(); + assertEquals(1, replicaStats.size()); + for (SegmentReplicationShardStats replica : replicaStats) { + assertNotNull(replica.getCurrentReplicationState()); + } - replicationPerGroupStats = replicationStats.get(index_2); - assertEquals(1, replicationPerGroupStats.size()); - perGroupStats = replicationPerGroupStats.get(0); - assertEquals(perGroupStats.getShardId(), index_2_primary.shardId()); - replicaStats = perGroupStats.getReplicaStats(); - assertEquals(1, replicaStats.size()); - for (SegmentReplicationShardStats replica : replicaStats) { - assertNotNull(replica.getCurrentReplicationState()); - } + replicationPerGroupStats = replicationStats.get(index_2); + assertEquals(1, replicationPerGroupStats.size()); + perGroupStats = replicationPerGroupStats.get(0); + assertEquals(perGroupStats.getShardId(), index_2_primary.shardId()); + replicaStats = perGroupStats.getReplicaStats(); + assertEquals(1, replicaStats.size()); + for (SegmentReplicationShardStats replica : replicaStats) { + assertNotNull(replica.getCurrentReplicationState()); + } + }, 30, TimeUnit.SECONDS); // test only single index queried. 
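+        // (left outside assertBusy: the busy block above has already verified stats are visible for both indices)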
- segmentReplicationStatsResponse = client().admin() + SegmentReplicationStatsResponse segmentReplicationStatsResponse = dataNodeClient().admin() .indices() .prepareSegmentReplicationStats() .setIndices(index_2) From c6cebc7a5ab0de954ab7c70f2c5415f888e23f84 Mon Sep 17 00:00:00 2001 From: Ashish <ssashish@amazon.com> Date: Wed, 31 Jan 2024 16:44:19 +0530 Subject: [PATCH 150/178] Fix flaky RemoteIndexRecoveryIT testRerouteRecovery test #9580 (#11918) Signed-off-by: Ashish Singh <ssashish@amazon.com> --- .../indices/recovery/IndexRecoveryIT.java | 4 +-- .../remotestore/RemoteIndexRecoveryIT.java | 6 ---- .../opensearch/test/OpenSearchTestCase.java | 33 +++++++++++++++++++ 3 files changed, 35 insertions(+), 8 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java index e4f1f8717f899..72e680e22ed75 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexRecoveryIT.java @@ -523,12 +523,12 @@ public void testRerouteRecovery() throws Exception { logger.info("--> waiting for recovery to start both on source and target"); final Index index = resolveIndex(INDEX_NAME); - assertBusy(() -> { + assertBusyWithFixedSleepTime(() -> { IndicesService indicesService = internalCluster().getInstance(IndicesService.class, nodeA); assertThat(indicesService.indexServiceSafe(index).getShard(0).recoveryStats().currentAsSource(), equalTo(1)); indicesService = internalCluster().getInstance(IndicesService.class, nodeB); assertThat(indicesService.indexServiceSafe(index).getShard(0).recoveryStats().currentAsTarget(), equalTo(1)); - }); + }, TimeValue.timeValueSeconds(10), TimeValue.timeValueMillis(500)); logger.info("--> request recoveries"); RecoveryResponse response = client().admin().indices().prepareRecoveries(INDEX_NAME).execute().actionGet(); diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java index c957f1b338bfe..6de61cf203c60 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteIndexRecoveryIT.java @@ -157,10 +157,4 @@ public void testDisconnectsDuringRecovery() { public void testReplicaRecovery() { } - - @AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/9580") - public void testRerouteRecovery() { - - } - } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java index b5ff30deecf5c..96bffcf2d3692 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java @@ -83,6 +83,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateUtils; import org.opensearch.common.time.FormatNames; +import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.common.util.concurrent.ThreadContext; @@ -1095,6 +1096,38 @@ public static void assertBusy(CheckedRunnable<Exception> codeBlock, long maxWait } } + /** + * Runs the code 
block for the provided max wait time and sleeping for fixed sleep time, waiting for no assertions to trip. + */ + public static void assertBusyWithFixedSleepTime(CheckedRunnable<Exception> codeBlock, TimeValue maxWaitTime, TimeValue sleepTime) + throws Exception { + long maxTimeInMillis = maxWaitTime.millis(); + long sleepTimeInMillis = sleepTime.millis(); + if (sleepTimeInMillis > maxTimeInMillis) { + throw new IllegalArgumentException("sleepTime is more than the maxWaitTime"); + } + long sum = 0; + List<AssertionError> failures = new ArrayList<>(); + while (sum <= maxTimeInMillis) { + try { + codeBlock.run(); + return; + } catch (AssertionError e) { + failures.add(e); + } + sum += sleepTimeInMillis; + Thread.sleep(sleepTimeInMillis); + } + try { + codeBlock.run(); + } catch (AssertionError e) { + for (AssertionError failure : failures) { + e.addSuppressed(failure); + } + throw e; + } + } + /** * Periodically execute the supplied function until it returns true, or a timeout * is reached. This version uses a timeout of 10 seconds. If at all possible, From bf5e628d82adf038f618d894a309d8dd57655fcd Mon Sep 17 00:00:00 2001 From: Varun Bansal <bansvaru@amazon.com> Date: Wed, 31 Jan 2024 21:30:13 +0530 Subject: [PATCH 151/178] fix flaky test - handle IllegalStateException and throw AssertionError to use assertbusy (#11981) * fix flaky test - handle IllegalStateException and throw AssertionError to use assertbusy Signed-off-by: bansvaru <bansvaru@amazon.com> * Empty Commit to retry the build Signed-off-by: bansvaru <bansvaru@amazon.com> --------- Signed-off-by: bansvaru <bansvaru@amazon.com> --- .../RemoteStoreClusterStateRestoreIT.java | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java index c61e2ec6e4f6c..3f90732f1f13d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreClusterStateRestoreIT.java @@ -310,10 +310,16 @@ private void validateCurrentMetadata() throws Exception { internalCluster().getClusterManagerName() ); assertBusy(() -> { - ClusterMetadataManifest manifest = remoteClusterStateService.getLatestClusterMetadataManifest( - getClusterState().getClusterName().value(), - getClusterState().metadata().clusterUUID() - ).get(); + ClusterMetadataManifest manifest; + try { + manifest = remoteClusterStateService.getLatestClusterMetadataManifest( + getClusterState().getClusterName().value(), + getClusterState().metadata().clusterUUID() + ).get(); + } catch (IllegalStateException e) { + // AssertionError helps us use assertBusy and retry validation if failed due to a race condition. 
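+                // (assertBusy retries only on AssertionError; any other exception type would escape the loop and fail the test immediately)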
+ throw new AssertionError("Error while validating latest cluster metadata", e); + } ClusterState clusterState = getClusterState(); Metadata currentMetadata = clusterState.metadata(); assertEquals(currentMetadata.indices().size(), manifest.getIndices().size()); From cfcb128a641bb17e3a3d3f649ff087f7bd138595 Mon Sep 17 00:00:00 2001 From: Sarthak Aggarwal <sarthagg@amazon.com> Date: Wed, 31 Jan 2024 22:46:39 +0530 Subject: [PATCH 152/178] fix flaky 11610 (#12049) Signed-off-by: Sarthak Aggarwal <sarthagg@amazon.com> --- .../java/org/opensearch/indices/state/CloseIndexIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java index 547f9e7a8d380..87e5df8c48981 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/state/CloseIndexIT.java @@ -287,7 +287,7 @@ public void testCloseWhileDeletingIndices() throws Exception { throw new AssertionError(e); } try { - assertAcked(client().admin().indices().prepareDelete(indexToDelete)); + assertAcked(client().admin().indices().prepareDelete(indexToDelete).setTimeout("60s")); } catch (final Exception e) { assertException(e, indexToDelete); } @@ -301,7 +301,7 @@ public void testCloseWhileDeletingIndices() throws Exception { throw new AssertionError(e); } try { - client().admin().indices().prepareClose(indexToClose).get(); + client().admin().indices().prepareClose(indexToClose).setTimeout("60s").get(); } catch (final Exception e) { assertException(e, indexToClose); } From 16c52577d4f9a2544614d119e0cb6c6fc0d2afa5 Mon Sep 17 00:00:00 2001 From: Sarat Vemulapalli <vemulapallisarat@gmail.com> Date: Wed, 31 Jan 2024 12:57:33 -0800 Subject: [PATCH 153/178] Add additional logging for Azure repository stats (#12079) * Adding logging for AzureStats Signed-off-by: Sarat Vemulapalli <vemsarat@amazon.com> * Debugging Signed-off-by: Sarat Vemulapalli <vemsarat@amazon.com> * Removing debugging lines Signed-off-by: Sarat Vemulapalli <vemsarat@amazon.com> * Addressing comments Signed-off-by: Sarat Vemulapalli <vemsarat@amazon.com> * Addressing feedback 2 Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> --------- Signed-off-by: Sarat Vemulapalli <vemsarat@amazon.com> Signed-off-by: Sarat Vemulapalli <vemulapallisarat@gmail.com> --- .../repositories/azure/AzureBlobStoreRepositoryTests.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java index 986720ec431fe..1ba16422c9214 100644 --- a/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/plugins/repository-azure/src/internalClusterTest/java/org/opensearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -39,6 +39,8 @@ import com.azure.storage.common.implementation.Constants; import com.azure.storage.common.policy.RequestRetryOptions; import com.azure.storage.common.policy.RetryPolicyType; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.regex.Regex; import 
org.opensearch.common.settings.MockSecureSettings; @@ -188,6 +190,7 @@ protected String requestUniqueId(final HttpExchange exchange) { @SuppressForbidden(reason = "this test uses a HttpServer to emulate an Azure endpoint") private static class AzureHTTPStatsCollectorHandler extends HttpStatsCollectorHandler { + private static final Logger testLogger = LogManager.getLogger(AzureHTTPStatsCollectorHandler.class); private static final Pattern listPattern = Pattern.compile("GET /[a-zA-Z0-9]+\\??.+"); private static final Pattern getPattern = Pattern.compile("GET /[^?/]+/[^?/]+\\??.*"); @@ -197,6 +200,7 @@ private AzureHTTPStatsCollectorHandler(HttpHandler delegate) { @Override protected void maybeTrack(String request, Headers headers) { + testLogger.info(request, headers); if (getPattern.matcher(request).matches()) { trackRequest("GetBlob"); } else if (Regex.simpleMatch("HEAD /*/*", request)) { From 4471a8d49b3415a78a0d1429c63fc6cda4531235 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Thu, 1 Feb 2024 07:42:04 -0500 Subject: [PATCH 154/178] Add advance(int) for numeric values in order to allow point based optimization to kick in (#12089) * Add advance(int) for numeric values in order to allow point based optimization to kick in Signed-off-by: Andriy Redko <andriy.redko@aiven.io> * Address code review comments Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --------- Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- CHANGELOG.md | 1 + .../opensearch/search/sort/FieldSortIT.java | 181 ++++++++++++++++++ .../fielddata/AbstractNumericDocValues.java | 3 + .../opensearch/index/fielddata/FieldData.java | 34 ++++ .../index/fielddata/NumericDoubleValues.java | 22 +++ .../SingletonSortedNumericDoubleValues.java | 4 + .../SortableLongBitsNumericDocValues.java | 5 + ...SortableLongBitsToNumericDoubleValues.java | 4 + ...leLongBitsToSortedNumericDoubleValues.java | 4 + .../fielddata/SortedNumericDoubleValues.java | 11 ++ .../UnsignedLongToNumericDoubleValues.java | 4 + ...signedLongToSortedNumericDoubleValues.java | 4 + .../plain/SortedNumericIndexFieldData.java | 20 ++ .../functionscore/DecayFunctionBuilder.java | 5 + .../org/opensearch/search/MultiValueMode.java | 10 + .../aggregations/support/MissingValues.java | 4 + .../aggregations/support/ValuesSource.java | 5 + 17 files changed, 321 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f9c2eda92ec73..e7fcf5ee6b35c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -250,6 +250,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix memory leak issue in ReorganizingLongHash ([#11953](https://github.com/opensearch-project/OpenSearch/issues/11953)) - Prevent setting remote_snapshot store type on index creation ([#11867](https://github.com/opensearch-project/OpenSearch/pull/11867)) - [BUG] Fix remote shards balancer when filtering throttled nodes ([#11724](https://github.com/opensearch-project/OpenSearch/pull/11724)) +- Add advance(int) for numeric values in order to allow point based optimization to kick in ([#12089](https://github.com/opensearch-project/OpenSearch/pull/12089)) ### Security diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java index f77f5a682da25..6145c3b8d86ab 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java @@ 
-2389,4 +2389,185 @@ public void testLongSortOptimizationCorrectResults() throws InterruptedException } } + public void testSimpleSortsPoints() throws Exception { + final int docs = 100; + + Random random = random(); + assertAcked( + prepareCreate("test").setMapping( + XContentFactory.jsonBuilder() + .startObject() + .startObject("properties") + .startObject("str_value") + .field("type", "keyword") + .endObject() + .startObject("boolean_value") + .field("type", "boolean") + .endObject() + .startObject("byte_value") + .field("type", "byte") + .endObject() + .startObject("short_value") + .field("type", "short") + .endObject() + .startObject("integer_value") + .field("type", "integer") + .endObject() + .startObject("long_value") + .field("type", "long") + .endObject() + .startObject("unsigned_long_value") + .field("type", "unsigned_long") + .endObject() + .startObject("float_value") + .field("type", "float") + .endObject() + .startObject("half_float_value") + .field("type", "half_float") + .endObject() + .startObject("double_value") + .field("type", "double") + .endObject() + .endObject() + .endObject() + ) + ); + ensureGreen(); + BigInteger UNSIGNED_LONG_BASE = Numbers.MAX_UNSIGNED_LONG_VALUE.subtract(BigInteger.valueOf(10000 * docs)); + List<IndexRequestBuilder> builders = new ArrayList<>(); + for (int i = 0; i < docs / 2; i++) { + IndexRequestBuilder builder = client().prepareIndex("test") + .setId(Integer.toString(i)) + .setSource( + jsonBuilder().startObject() + .field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })) + .field("boolean_value", true) + .field("byte_value", i) + .field("short_value", i) + .field("integer_value", i) + .field("long_value", i) + .field("unsigned_long_value", UNSIGNED_LONG_BASE.add(BigInteger.valueOf(10000 * i))) + .field("float_value", 32 * i) + .field("half_float_value", 16 * i) + .field("double_value", 64 * i) + .endObject() + ); + builders.add(builder); + } + + // We keep half of the docs with numeric values and other half without + for (int i = docs / 2; i < docs; i++) { + IndexRequestBuilder builder = client().prepareIndex("test") + .setId(Integer.toString(i)) + .setSource( + jsonBuilder().startObject().field("str_value", new String(new char[] { (char) (97 + i), (char) (97 + i) })).endObject() + ); + builders.add(builder); + } + + int j = 0; + Collections.shuffle(builders, random); + for (IndexRequestBuilder builder : builders) { + builder.get(); + if ((++j % 25) == 0) { + refresh(); + } + + } + refresh(); + indexRandomForConcurrentSearch("test"); + + final int size = 2; + // HALF_FLOAT + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort("half_float_value", SortOrder.ASC) + .get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("half_float_value", SortOrder.DESC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + + // FLOAT + 
searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.ASC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("float_value", SortOrder.DESC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + + // DOUBLE + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.ASC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch().setQuery(matchAllQuery()).setSize(size).addSort("double_value", SortOrder.DESC).get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + + // UNSIGNED_LONG + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort("unsigned_long_value", SortOrder.ASC) + .get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + searchResponse = client().prepareSearch() + .setQuery(matchAllQuery()) + .setSize(size) + .addSort("unsigned_long_value", SortOrder.DESC) + .get(); + + assertHitCount(searchResponse, docs); + assertThat(searchResponse.getHits().getHits().length, equalTo(size)); + for (int i = 0; i < size; i++) { + assertThat(searchResponse.getHits().getAt(i).getId(), equalTo(Integer.toString(docs / 2 - 1 - i))); + } + + assertThat(searchResponse.toString(), not(containsString("error"))); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java b/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java index a2a70e280187a..3a2504ce92158 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/AbstractNumericDocValues.java @@ -43,6 +43,9 @@ * aggregations, which only use {@link #advanceExact(int)} and * {@link #longValue()}. * + * In case when optimizations based on point values are used, the {@link #advance(int)} + * and, optionally, {@link #cost()} have to be implemented as well. 
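+ * Most implementations in this change simply delegate to the advance of the values they wrap.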
+ * * @opensearch.internal */ public abstract class AbstractNumericDocValues extends NumericDocValues { diff --git a/server/src/main/java/org/opensearch/index/fielddata/FieldData.java b/server/src/main/java/org/opensearch/index/fielddata/FieldData.java index e09de53dc05f7..6db6bbccacae5 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/FieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/FieldData.java @@ -37,6 +37,7 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BytesRef; import org.opensearch.common.Numbers; import org.opensearch.common.geo.GeoPoint; @@ -76,6 +77,10 @@ public double doubleValue() throws IOException { throw new UnsupportedOperationException(); } + @Override + public int advance(int target) throws IOException { + return DocIdSetIterator.NO_MORE_DOCS; + } }; } @@ -561,6 +566,10 @@ public boolean advanceExact(int doc) throws IOException { return values.advanceExact(doc); } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } /** @@ -591,6 +600,10 @@ public int docValueCount() { return values.docValueCount(); } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } /** @@ -622,6 +635,12 @@ public long longValue() throws IOException { public int docID() { return docID; } + + @Override + public int advance(int target) throws IOException { + docID = values.advance(target); + return docID; + } } /** @@ -683,6 +702,11 @@ public boolean advanceExact(int target) throws IOException { public long longValue() throws IOException { return value; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } @@ -715,6 +739,11 @@ public boolean advanceExact(int target) throws IOException { public long longValue() throws IOException { return value.longValue(); } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } @@ -742,6 +771,11 @@ public boolean advanceExact(int target) throws IOException { public double doubleValue() throws IOException { return value; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java index b0f3400acfb3d..f69cfacaf35d4 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/NumericDoubleValues.java @@ -71,6 +71,11 @@ public long longValue() throws IOException { public int docID() { return docID; } + + @Override + public int advance(int target) throws IOException { + return NumericDoubleValues.this.advance(target); + } }; } @@ -95,6 +100,23 @@ public long longValue() throws IOException { public int docID() { return docID; } + + @Override + public int advance(int target) throws IOException { + return NumericDoubleValues.this.advance(target); + } }; } + + /** + * Advances to the first beyond the current whose document number is greater than or equal to + * <i>target</i>, and returns the document number itself. 
Exhausts the iterator and returns {@link + * org.apache.lucene.search.DocIdSetIterator#NO_MORE_DOCS} if <i>target</i> is greater than the highest document number in the set. + * + * This method is being used by {@link org.apache.lucene.search.comparators.NumericComparator.NumericLeafComparator} when point values optimization kicks + * in and is implemented by most numeric types. + */ + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java index 4ee494ffb30aa..816445bb319f1 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SingletonSortedNumericDoubleValues.java @@ -69,4 +69,8 @@ public double nextValue() throws IOException { return in.doubleValue(); } + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java index 39aca38c331ea..e2739e462dea5 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsNumericDocValues.java @@ -74,4 +74,9 @@ public NumericDoubleValues getDoubleValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } + } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java index 150e114d342de..98a44c246f654 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java @@ -67,4 +67,8 @@ public NumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java index 1bae845c9b0d2..279a78ac51adf 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java @@ -72,4 +72,8 @@ public SortedNumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java index dce1aff9cc94f..be9064751b5f0 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/SortedNumericDoubleValues.java @@ -70,4 +70,15 @@ protected SortedNumericDoubleValues() {} */ public abstract int docValueCount(); + /** + * Advances to 
the first beyond the current whose document number is greater than or equal to + * <i>target</i>, and returns the document number itself. Exhausts the iterator and returns {@link + * org.apache.lucene.search.DocIdSetIterator#NO_MORE_DOCS} if <i>target</i> is greater than the highest document number in the set. + * + * This method is being used by {@link org.apache.lucene.search.comparators.NumericComparator.NumericLeafComparator} when point values optimization kicks + * in and is implemented by most numeric types. + */ + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java index 8d17146760d9e..d9e9dd6a293fd 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToNumericDoubleValues.java @@ -42,4 +42,8 @@ public NumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java index 90b49e19a8954..63c7e6162cc55 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java +++ b/server/src/main/java/org/opensearch/index/fielddata/UnsignedLongToSortedNumericDoubleValues.java @@ -47,4 +47,8 @@ public SortedNumericDocValues getLongValues() { return values; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } } diff --git a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java index b70752df9e826..0019a41e67c02 100644 --- a/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java +++ b/server/src/main/java/org/opensearch/index/fielddata/plain/SortedNumericIndexFieldData.java @@ -336,6 +336,11 @@ public double doubleValue() throws IOException { public boolean advanceExact(int doc) throws IOException { return in.advanceExact(doc); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** @@ -364,6 +369,11 @@ public double nextValue() throws IOException { public int docValueCount() { return in.docValueCount(); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** @@ -434,6 +444,11 @@ public double doubleValue() throws IOException { public boolean advanceExact(int doc) throws IOException { return in.advanceExact(doc); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** @@ -462,6 +477,11 @@ public double nextValue() throws IOException { public int docValueCount() { return in.docValueCount(); } + + @Override + public int advance(int target) throws IOException { + return in.advance(target); + } } /** diff --git a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java index cfc44d4434d3b..1c693f9761240 100644 
--- a/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/functionscore/DecayFunctionBuilder.java @@ -560,6 +560,11 @@ public boolean needsScores() { protected NumericDoubleValues distance(LeafReaderContext context) { final SortedNumericDoubleValues doubleValues = fieldData.load(context).getDoubleValues(); return FieldData.replaceMissing(mode.select(new SortingNumericDoubleValues() { + @Override + public int advance(int target) throws IOException { + return doubleValues.advance(target); + } + @Override public boolean advanceExact(int docId) throws IOException { if (doubleValues.advanceExact(docId)) { diff --git a/server/src/main/java/org/opensearch/search/MultiValueMode.java b/server/src/main/java/org/opensearch/search/MultiValueMode.java index ca088203733c6..a99da674836f2 100644 --- a/server/src/main/java/org/opensearch/search/MultiValueMode.java +++ b/server/src/main/java/org/opensearch/search/MultiValueMode.java @@ -685,6 +685,11 @@ public boolean advanceExact(int target) throws IOException { public double doubleValue() throws IOException { return this.value; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } } @@ -745,6 +750,11 @@ public boolean advanceExact(int parentDoc) throws IOException { public double doubleValue() throws IOException { return lastEmittedValue; } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java index da1d9961ed81b..d21737a8366b2 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/MissingValues.java @@ -227,6 +227,10 @@ public String toString() { return "anon SortedNumericDoubleValues of [" + super.toString() + "]"; } + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } }; } diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java index 1a76183ac1a2d..3ce1f0447dfcc 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/ValuesSource.java @@ -576,6 +576,11 @@ public boolean advanceExact(int target) throws IOException { } return false; } + + @Override + public int advance(int target) throws IOException { + return doubleValues.advance(target); + } } } From 3c074617e6582d8347547df313cc50e7f2abfb36 Mon Sep 17 00:00:00 2001 From: Owais Kazi <owaiskazi19@gmail.com> Date: Thu, 1 Feb 2024 10:23:05 -0800 Subject: [PATCH 155/178] Added spotless for json files (#12076) * Added spotless for json files Signed-off-by: Owais Kazi <owaiskazi19@gmail.com> * Excluded json files with new line tests Signed-off-by: Owais Kazi <owaiskazi19@gmail.com> --------- Signed-off-by: Owais Kazi <owaiskazi19@gmail.com> --- gradle/formatting.gradle | 4 +++- .../org/opensearch/index/analysis/shingle_analysis.json | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/gradle/formatting.gradle b/gradle/formatting.gradle index 93e1127c97a56..f3a4bf5cc765b 100644 --- a/gradle/formatting.gradle +++ 
b/gradle/formatting.gradle @@ -99,7 +99,9 @@ allprojects { } } format 'misc', { - target '*.md', '*.gradle', '**/*.yaml', '**/*.yml', '**/*.svg' + target '*.md', '*.gradle', '**/*.json', '**/*.yaml', '**/*.yml', '**/*.svg' + + targetExclude '**/simple-bulk11.json', '**/simple-msearch5.json' trimTrailingWhitespace() endWithNewline() diff --git a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json index 4a4fc7d2c81b1..1ed56fa6dab4d 100644 --- a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json +++ b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json @@ -20,4 +20,4 @@ } } } -} \ No newline at end of file +} From f715ee1a485e550802accc1c2e3d8101208d4f0b Mon Sep 17 00:00:00 2001 From: Poojita Raj <poojiraj@amazon.com> Date: Thu, 1 Feb 2024 13:33:13 -0800 Subject: [PATCH 156/178] Modify shrink exception to be more informative (#12117) Signed-off-by: Poojita Raj <poojiraj@amazon.com> --- .../replication/SegmentReplicationResizeRequestIT.java | 3 ++- .../action/admin/indices/shrink/TransportResizeAction.java | 6 ++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java index fb06a97bd51c2..69411b2ff640a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/replication/SegmentReplicationResizeRequestIT.java @@ -87,7 +87,8 @@ public void testCreateShrinkIndexThrowsExceptionWhenReplicasBehind() throws Exce .get() ); assertEquals( - " For index [test] replica shards haven't caught up with primary, please retry after sometime.", + "Replication still in progress for index [test]. Please wait for replication to complete and retry. " + + "Use the _cat/segment_replication/test api to check if the index is up to date (e.g. bytes_behind == 0).", exception.getMessage() ); diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java index 23cd8efdcaf59..ca4c16935c2b9 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java @@ -168,9 +168,11 @@ protected void clusterManagerOperation( .getSegments() .getReplicationStats().maxBytesBehind != 0) { throw new IllegalStateException( - " For index [" + "Replication still in progress for index [" + sourceIndex - + "] replica shards haven't caught up with primary, please retry after sometime." + + "]. Please wait for replication to complete and retry. Use the _cat/segment_replication/" + + sourceIndex + + " api to check if the index is up to date (e.g. bytes_behind == 0)." 
); } From 90a815eea8c06c31296f5ba6e79aa34acfe7bbd2 Mon Sep 17 00:00:00 2001 From: Kunal Kotwani <kkotwani@amazon.com> Date: Thu, 1 Feb 2024 16:12:42 -0800 Subject: [PATCH 157/178] Mute flaky test testThreeNodesNoClusterManagerBlock (#12093) Signed-off-by: Kunal Kotwani <kkotwani@amazon.com> --- build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/build.gradle b/build.gradle index e082967255b10..375ab91e99e94 100644 --- a/build.gradle +++ b/build.gradle @@ -545,6 +545,7 @@ subprojects { includeClasses.add("org.opensearch.snapshots.SnapshotStatusApisIT") includeClasses.add("org.opensearch.test.rest.ClientYamlTestSuiteIT") includeClasses.add("org.opensearch.upgrade.DetectEsInstallationTaskTests") + includeClasses.add("org.opensearch.cluster.MinimumClusterManagerNodesIT") } } } From 8cb4bb4c1776aaee3c4065c5a6b43e86a1e52daa Mon Sep 17 00:00:00 2001 From: Sudarshan baliga <baliga108@gmail.com> Date: Fri, 2 Feb 2024 15:02:42 +0530 Subject: [PATCH 158/178] Added transport action for bulk async shard fetch for primary shards (#8218) * Added transport action for bulk async shard fetch for primary shards Signed-off-by: sudarshan baliga <baliga108@gmail.com> Co-authored-by: Shivansh Arora <hishiv@amazon.com> --- .../gateway/GatewayRecoveryTestUtils.java | 77 ++++ .../gateway/RecoveryFromGatewayIT.java | 99 +++++ .../org/opensearch/gateway/GatewayModule.java | 1 + ...ansportNodesGatewayStartedShardHelper.java | 114 +++++ ...ransportNodesListGatewayStartedShards.java | 86 +--- ...ortNodesListGatewayStartedShardsBatch.java | 401 ++++++++++++++++++ 6 files changed, 708 insertions(+), 70 deletions(-) create mode 100644 server/src/internalClusterTest/java/org/opensearch/gateway/GatewayRecoveryTestUtils.java create mode 100644 server/src/main/java/org/opensearch/gateway/TransportNodesGatewayStartedShardHelper.java create mode 100644 server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShardsBatch.java diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayRecoveryTestUtils.java b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayRecoveryTestUtils.java new file mode 100644 index 0000000000000..2b6a5b4ee6867 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/GatewayRecoveryTestUtils.java @@ -0,0 +1,77 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.gateway; + +import org.opensearch.action.admin.cluster.state.ClusterStateRequest; +import org.opensearch.action.admin.cluster.state.ClusterStateResponse; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.node.DiscoveryNode; +import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.index.shard.ShardPath; +import org.opensearch.indices.store.ShardAttributes; + +import java.io.IOException; +import java.nio.file.DirectoryStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import static org.opensearch.test.OpenSearchIntegTestCase.client; +import static org.opensearch.test.OpenSearchIntegTestCase.internalCluster; +import static org.opensearch.test.OpenSearchIntegTestCase.resolveIndex; + +public class GatewayRecoveryTestUtils { + + public static DiscoveryNode[] getDiscoveryNodes() throws ExecutionException, InterruptedException { + final ClusterStateRequest clusterStateRequest = new ClusterStateRequest(); + clusterStateRequest.local(false); + clusterStateRequest.clear().nodes(true).routingTable(true).indices("*"); + ClusterStateResponse clusterStateResponse = client().admin().cluster().state(clusterStateRequest).get(); + final List<DiscoveryNode> nodes = new LinkedList<>(clusterStateResponse.getState().nodes().getDataNodes().values()); + DiscoveryNode[] disNodesArr = new DiscoveryNode[nodes.size()]; + nodes.toArray(disNodesArr); + return disNodesArr; + } + + public static Map<ShardId, ShardAttributes> prepareRequestMap(String[] indices, int primaryShardCount) { + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = new HashMap<>(); + for (String indexName : indices) { + final Index index = resolveIndex(indexName); + final String customDataPath = IndexMetadata.INDEX_DATA_PATH_SETTING.get( + client().admin().indices().prepareGetSettings(indexName).get().getIndexToSettings().get(indexName) + ); + for (int shardIdNum = 0; shardIdNum < primaryShardCount; shardIdNum++) { + final ShardId shardId = new ShardId(index, shardIdNum); + shardIdShardAttributesMap.put(shardId, new ShardAttributes(shardId, customDataPath)); + } + } + return shardIdShardAttributesMap; + } + + public static void corruptShard(String nodeName, ShardId shardId) throws IOException, InterruptedException { + for (Path path : internalCluster().getInstance(NodeEnvironment.class, nodeName).availableShardPaths(shardId)) { + final Path indexPath = path.resolve(ShardPath.INDEX_FOLDER_NAME); + if (Files.exists(indexPath)) { // multi data path might only have one path in use + try (DirectoryStream<Path> stream = Files.newDirectoryStream(indexPath)) { + for (Path item : stream) { + if (item.getFileName().toString().startsWith("segments_")) { + Files.delete(item); + } + } + } + } + } + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java index 229cd7bffad2f..9da1336642a64 100644 --- a/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/gateway/RecoveryFromGatewayIT.java @@ -36,6 +36,8 @@ import org.opensearch.action.admin.cluster.configuration.AddVotingConfigExclusionsRequest; import 
org.opensearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsAction; import org.opensearch.action.admin.cluster.configuration.ClearVotingConfigExclusionsRequest; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsGroup; +import org.opensearch.action.admin.cluster.shards.ClusterSearchShardsResponse; import org.opensearch.action.admin.indices.recovery.RecoveryResponse; import org.opensearch.action.admin.indices.stats.IndexStats; import org.opensearch.action.admin.indices.stats.ShardStats; @@ -60,6 +62,7 @@ import org.opensearch.indices.IndicesService; import org.opensearch.indices.recovery.RecoveryState; import org.opensearch.indices.replication.common.ReplicationLuceneIndex; +import org.opensearch.indices.store.ShardAttributes; import org.opensearch.plugins.Plugin; import org.opensearch.test.InternalSettingsPlugin; import org.opensearch.test.InternalTestCluster.RestartCallback; @@ -85,6 +88,9 @@ import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.gateway.GatewayRecoveryTestUtils.corruptShard; +import static org.opensearch.gateway.GatewayRecoveryTestUtils.getDiscoveryNodes; +import static org.opensearch.gateway.GatewayRecoveryTestUtils.prepareRequestMap; import static org.opensearch.gateway.GatewayService.RECOVER_AFTER_NODES_SETTING; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.index.query.QueryBuilders.termQuery; @@ -734,4 +740,97 @@ public void testMessyElectionsStillMakeClusterGoGreen() throws Exception { internalCluster().fullRestart(); ensureGreen("test"); } + + public void testSingleShardFetchUsingBatchAction() { + String indexName = "test"; + int numOfShards = 1; + prepareIndex(indexName, numOfShards); + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = prepareRequestMap(new String[] { indexName }, numOfShards); + + ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards(indexName).get(); + + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch response; + response = ActionTestUtils.executeBlocking( + internalCluster().getInstance(TransportNodesListGatewayStartedShardsBatch.class), + new TransportNodesListGatewayStartedShardsBatch.Request(searchShardsResponse.getNodes(), shardIdShardAttributesMap) + ); + final Index index = resolveIndex(indexName); + final ShardId shardId = new ShardId(index, 0); + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards = response.getNodesMap() + .get(searchShardsResponse.getNodes()[0].getId()) + .getNodeGatewayStartedShardsBatch() + .get(shardId); + assertNodeGatewayStartedShardsHappyCase(nodeGatewayStartedShards); + } + + public void testShardFetchMultiNodeMultiIndexesUsingBatchAction() { + // start node + internalCluster().startNode(); + String indexName1 = "test1"; + String indexName2 = "test2"; + int numShards = internalCluster().numDataNodes(); + // assign one primary shard each to the data nodes + prepareIndex(indexName1, numShards); + prepareIndex(indexName2, numShards); + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = prepareRequestMap(new String[] { indexName1, indexName2 }, numShards); + ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards(indexName1, indexName2).get(); 
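+        // one primary of each index was assigned to every data node above, so every data node should appear in the response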
+ assertEquals(internalCluster().numDataNodes(), searchShardsResponse.getNodes().length); + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch response; + response = ActionTestUtils.executeBlocking( + internalCluster().getInstance(TransportNodesListGatewayStartedShardsBatch.class), + new TransportNodesListGatewayStartedShardsBatch.Request(searchShardsResponse.getNodes(), shardIdShardAttributesMap) + ); + for (ClusterSearchShardsGroup clusterSearchShardsGroup : searchShardsResponse.getGroups()) { + ShardId shardId = clusterSearchShardsGroup.getShardId(); + assertEquals(1, clusterSearchShardsGroup.getShards().length); + String nodeId = clusterSearchShardsGroup.getShards()[0].currentNodeId(); + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards = response.getNodesMap() + .get(nodeId) + .getNodeGatewayStartedShardsBatch() + .get(shardId); + assertNodeGatewayStartedShardsHappyCase(nodeGatewayStartedShards); + } + } + + public void testShardFetchCorruptedShardsUsingBatchAction() throws Exception { + String indexName = "test"; + int numOfShards = 1; + prepareIndex(indexName, numOfShards); + Map<ShardId, ShardAttributes> shardIdShardAttributesMap = prepareRequestMap(new String[] { indexName }, numOfShards); + ClusterSearchShardsResponse searchShardsResponse = client().admin().cluster().prepareSearchShards(indexName).get(); + final Index index = resolveIndex(indexName); + final ShardId shardId = new ShardId(index, 0); + corruptShard(searchShardsResponse.getNodes()[0].getName(), shardId); + TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch response; + internalCluster().restartNode(searchShardsResponse.getNodes()[0].getName()); + response = ActionTestUtils.executeBlocking( + internalCluster().getInstance(TransportNodesListGatewayStartedShardsBatch.class), + new TransportNodesListGatewayStartedShardsBatch.Request(getDiscoveryNodes(), shardIdShardAttributesMap) + ); + DiscoveryNode[] discoveryNodes = getDiscoveryNodes(); + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards = response.getNodesMap() + .get(discoveryNodes[0].getId()) + .getNodeGatewayStartedShardsBatch() + .get(shardId); + assertNotNull(nodeGatewayStartedShards.storeException()); + assertNotNull(nodeGatewayStartedShards.allocationId()); + assertTrue(nodeGatewayStartedShards.primary()); + } + + private void assertNodeGatewayStartedShardsHappyCase( + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard nodeGatewayStartedShards + ) { + assertNull(nodeGatewayStartedShards.storeException()); + assertNotNull(nodeGatewayStartedShards.allocationId()); + assertTrue(nodeGatewayStartedShards.primary()); + } + + private void prepareIndex(String indexName, int numberOfPrimaryShards) { + createIndex( + indexName, + Settings.builder().put(SETTING_NUMBER_OF_SHARDS, numberOfPrimaryShards).put(SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + index(indexName, "type", "1", Collections.emptyMap()); + flush(indexName); + } } diff --git a/server/src/main/java/org/opensearch/gateway/GatewayModule.java b/server/src/main/java/org/opensearch/gateway/GatewayModule.java index 59ec0243c88c9..847ba01737332 100644 --- a/server/src/main/java/org/opensearch/gateway/GatewayModule.java +++ b/server/src/main/java/org/opensearch/gateway/GatewayModule.java @@ -47,6 +47,7 @@ protected void configure() { bind(GatewayService.class).asEagerSingleton(); bind(TransportNodesListGatewayMetaState.class).asEagerSingleton(); 
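+        // the new batched variant below is registered alongside the per-shard action until the batch path replaces it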
bind(TransportNodesListGatewayStartedShards.class).asEagerSingleton(); + bind(TransportNodesListGatewayStartedShardsBatch.class).asEagerSingleton(); bind(LocalAllocateDangledIndices.class).asEagerSingleton(); } } diff --git a/server/src/main/java/org/opensearch/gateway/TransportNodesGatewayStartedShardHelper.java b/server/src/main/java/org/opensearch/gateway/TransportNodesGatewayStartedShardHelper.java new file mode 100644 index 0000000000000..403e3e96fa209 --- /dev/null +++ b/server/src/main/java/org/opensearch/gateway/TransportNodesGatewayStartedShardHelper.java @@ -0,0 +1,114 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.gateway; + +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.OpenSearchException; +import org.opensearch.cluster.metadata.IndexMetadata; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.index.shard.ShardId; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.index.IndexSettings; +import org.opensearch.index.shard.IndexShard; +import org.opensearch.index.shard.ShardPath; +import org.opensearch.index.shard.ShardStateMetadata; +import org.opensearch.index.store.Store; +import org.opensearch.indices.IndicesService; + +import java.io.IOException; + +/** + * This class has the common code used in {@link TransportNodesListGatewayStartedShards} and + * {@link TransportNodesListGatewayStartedShardsBatch} to get the shard info on the local node. + * <p> + * This class should not be used to add more functions and will be removed when the + * {@link TransportNodesListGatewayStartedShards} will be deprecated and all the code will be moved to + * {@link TransportNodesListGatewayStartedShardsBatch} + * + * @opensearch.internal + */ +public class TransportNodesGatewayStartedShardHelper { + public static TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard getShardInfoOnLocalNode( + Logger logger, + final ShardId shardId, + NamedXContentRegistry namedXContentRegistry, + NodeEnvironment nodeEnv, + IndicesService indicesService, + String shardDataPathInRequest, + Settings settings, + ClusterService clusterService + ) throws IOException { + logger.trace("{} loading local shard state info", shardId); + ShardStateMetadata shardStateMetadata = ShardStateMetadata.FORMAT.loadLatestState( + logger, + namedXContentRegistry, + nodeEnv.availableShardPaths(shardId) + ); + if (shardStateMetadata != null) { + if (indicesService.getShardOrNull(shardId) == null + && shardStateMetadata.indexDataLocation == ShardStateMetadata.IndexDataLocation.LOCAL) { + final String customDataPath; + if (shardDataPathInRequest != null) { + customDataPath = shardDataPathInRequest; + } else { + // TODO: Fallback for BWC with older OpenSearch versions. 
+                    // Remove once request.getCustomDataPath() always returns non-null
+                    final IndexMetadata metadata = clusterService.state().metadata().index(shardId.getIndex());
+                    if (metadata != null) {
+                        customDataPath = new IndexSettings(metadata, settings).customDataPath();
+                    } else {
+                        logger.trace("{} node doesn't have meta data for the requested index", shardId);
+                        throw new OpenSearchException("node doesn't have meta data for index " + shardId.getIndex());
+                    }
+                }
+                // we don't have an open shard on the store, validate the files on disk are openable
+                ShardPath shardPath = null;
+                try {
+                    shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, customDataPath);
+                    if (shardPath == null) {
+                        throw new IllegalStateException(shardId + " no shard path found");
+                    }
+                    Store.tryOpenIndex(shardPath.resolveIndex(), shardId, nodeEnv::shardLock, logger);
+                } catch (Exception exception) {
+                    final ShardPath finalShardPath = shardPath;
+                    logger.trace(
+                        () -> new ParameterizedMessage(
+                            "{} can't open index for shard [{}] in path [{}]",
+                            shardId,
+                            shardStateMetadata,
+                            (finalShardPath != null) ? finalShardPath.resolveIndex() : ""
+                        ),
+                        exception
+                    );
+                    String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null;
+                    return new TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard(
+                        allocationId,
+                        shardStateMetadata.primary,
+                        null,
+                        exception
+                    );
+                }
+            }
+
+            logger.debug("{} shard state info found: [{}]", shardId, shardStateMetadata);
+            String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null;
+            final IndexShard shard = indicesService.getShardOrNull(shardId);
+            return new TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard(
+                allocationId,
+                shardStateMetadata.primary,
+                shard != null ?
shard.getLatestReplicationCheckpoint() : null + ); + } + logger.trace("{} no local shard info found", shardId); + return new TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard(null, false, null); + } +} diff --git a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java index 601a5c671d67c..0ba872aab9974 100644 --- a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java +++ b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShards.java @@ -32,7 +32,6 @@ package org.opensearch.gateway; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.ActionType; @@ -43,7 +42,6 @@ import org.opensearch.action.support.nodes.BaseNodesResponse; import org.opensearch.action.support.nodes.TransportNodesAction; import org.opensearch.cluster.ClusterName; -import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.Nullable; @@ -55,11 +53,6 @@ import org.opensearch.core.index.shard.ShardId; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.env.NodeEnvironment; -import org.opensearch.index.IndexSettings; -import org.opensearch.index.shard.IndexShard; -import org.opensearch.index.shard.ShardPath; -import org.opensearch.index.shard.ShardStateMetadata; -import org.opensearch.index.store.Store; import org.opensearch.indices.IndicesService; import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint; import org.opensearch.indices.store.ShardAttributes; @@ -72,6 +65,8 @@ import java.util.Map; import java.util.Objects; +import static org.opensearch.gateway.TransportNodesGatewayStartedShardHelper.getShardInfoOnLocalNode; + /** * This transport action is used to fetch the shard version from each node during primary allocation in {@link GatewayAllocator}. * We use this to find out which node holds the latest shard version and which of them used to be a primary in order to allocate @@ -159,72 +154,23 @@ protected NodesGatewayStartedShards newResponse( @Override protected NodeGatewayStartedShards nodeOperation(NodeRequest request) { try { - final ShardId shardId = request.getShardId(); - logger.trace("{} loading local shard state info", shardId); - ShardStateMetadata shardStateMetadata = ShardStateMetadata.FORMAT.loadLatestState( + TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShard shardInfo = getShardInfoOnLocalNode( logger, + request.getShardId(), namedXContentRegistry, - nodeEnv.availableShardPaths(request.shardId) + nodeEnv, + indicesService, + request.getCustomDataPath(), + settings, + clusterService + ); + return new NodeGatewayStartedShards( + clusterService.localNode(), + shardInfo.allocationId(), + shardInfo.primary(), + shardInfo.replicationCheckpoint(), + shardInfo.storeException() ); - if (shardStateMetadata != null) { - if (indicesService.getShardOrNull(shardId) == null - && shardStateMetadata.indexDataLocation == ShardStateMetadata.IndexDataLocation.LOCAL) { - final String customDataPath; - if (request.getCustomDataPath() != null) { - customDataPath = request.getCustomDataPath(); - } else { - // TODO: Fallback for BWC with older OpenSearch versions. 
- // Remove once request.getCustomDataPath() always returns non-null - final IndexMetadata metadata = clusterService.state().metadata().index(shardId.getIndex()); - if (metadata != null) { - customDataPath = new IndexSettings(metadata, settings).customDataPath(); - } else { - logger.trace("{} node doesn't have meta data for the requests index", shardId); - throw new OpenSearchException("node doesn't have meta data for index " + shardId.getIndex()); - } - } - // we don't have an open shard on the store, validate the files on disk are openable - ShardPath shardPath = null; - try { - shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, customDataPath); - if (shardPath == null) { - throw new IllegalStateException(shardId + " no shard path found"); - } - Store.tryOpenIndex(shardPath.resolveIndex(), shardId, nodeEnv::shardLock, logger); - } catch (Exception exception) { - final ShardPath finalShardPath = shardPath; - logger.trace( - () -> new ParameterizedMessage( - "{} can't open index for shard [{}] in path [{}]", - shardId, - shardStateMetadata, - (finalShardPath != null) ? finalShardPath.resolveIndex() : "" - ), - exception - ); - String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null; - return new NodeGatewayStartedShards( - clusterService.localNode(), - allocationId, - shardStateMetadata.primary, - null, - exception - ); - } - } - - logger.debug("{} shard state info found: [{}]", shardId, shardStateMetadata); - String allocationId = shardStateMetadata.allocationId != null ? shardStateMetadata.allocationId.getId() : null; - final IndexShard shard = indicesService.getShardOrNull(shardId); - return new NodeGatewayStartedShards( - clusterService.localNode(), - allocationId, - shardStateMetadata.primary, - shard != null ? shard.getLatestReplicationCheckpoint() : null - ); - } - logger.trace("{} no local shard info found", shardId); - return new NodeGatewayStartedShards(clusterService.localNode(), null, false, null); } catch (Exception e) { throw new OpenSearchException("failed to load started shards", e); } diff --git a/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShardsBatch.java b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShardsBatch.java new file mode 100644 index 0000000000000..bc327c1b85748 --- /dev/null +++ b/server/src/main/java/org/opensearch/gateway/TransportNodesListGatewayStartedShardsBatch.java @@ -0,0 +1,401 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.gateway;
+
+import org.opensearch.OpenSearchException;
+import org.opensearch.action.ActionType;
+import org.opensearch.action.FailedNodeException;
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.action.support.nodes.BaseNodeResponse;
+import org.opensearch.action.support.nodes.BaseNodesRequest;
+import org.opensearch.action.support.nodes.BaseNodesResponse;
+import org.opensearch.action.support.nodes.TransportNodesAction;
+import org.opensearch.cluster.ClusterName;
+import org.opensearch.cluster.node.DiscoveryNode;
+import org.opensearch.cluster.service.ClusterService;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.core.action.ActionListener;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.index.shard.ShardId;
+import org.opensearch.core.xcontent.NamedXContentRegistry;
+import org.opensearch.env.NodeEnvironment;
+import org.opensearch.indices.IndicesService;
+import org.opensearch.indices.replication.checkpoint.ReplicationCheckpoint;
+import org.opensearch.indices.store.ShardAttributes;
+import org.opensearch.threadpool.ThreadPool;
+import org.opensearch.transport.TransportRequest;
+import org.opensearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import static org.opensearch.gateway.TransportNodesGatewayStartedShardHelper.getShardInfoOnLocalNode;
+
+/**
+ * This transport action is used to fetch a batch of unassigned shard versions from each node during primary allocation in {@link GatewayAllocator}.
+ * We use this to find out which node holds the latest shard version and which of them used to be a primary in order to allocate
+ * shards after node or cluster restarts.
+ *
+ * @opensearch.internal
+ */
+public class TransportNodesListGatewayStartedShardsBatch extends TransportNodesAction<
+    TransportNodesListGatewayStartedShardsBatch.Request,
+    TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch,
+    TransportNodesListGatewayStartedShardsBatch.NodeRequest,
+    TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShardsBatch>
+    implements
+    AsyncShardFetch.Lister<
+        TransportNodesListGatewayStartedShardsBatch.NodesGatewayStartedShardsBatch,
+        TransportNodesListGatewayStartedShardsBatch.NodeGatewayStartedShardsBatch> {
+
+    public static final String ACTION_NAME = "internal:gateway/local/started_shards_batch";
+    public static final ActionType<NodesGatewayStartedShardsBatch> TYPE = new ActionType<>(
+        ACTION_NAME,
+        NodesGatewayStartedShardsBatch::new
+    );
+
+    private final Settings settings;
+    private final NodeEnvironment nodeEnv;
+    private final IndicesService indicesService;
+    private final NamedXContentRegistry namedXContentRegistry;
+
+    @Inject
+    public TransportNodesListGatewayStartedShardsBatch(
+        Settings settings,
+        ThreadPool threadPool,
+        ClusterService clusterService,
+        TransportService transportService,
+        ActionFilters actionFilters,
+        NodeEnvironment env,
+        IndicesService indicesService,
+        NamedXContentRegistry namedXContentRegistry
+    ) {
+        super(
+            ACTION_NAME,
+            threadPool,
+            clusterService,
+            transportService,
+            actionFilters,
+            Request::new,
+            NodeRequest::new,
+            ThreadPool.Names.FETCH_SHARD_STARTED,
+            NodeGatewayStartedShardsBatch.class
+        );
+        this.settings = settings;
+        this.nodeEnv = env;
+        this.indicesService = indicesService;
+        this.namedXContentRegistry = namedXContentRegistry;
+    }
+
+    @Override
+    public void list(
+        Map<ShardId, ShardAttributes> shardAttributesMap,
+        DiscoveryNode[] nodes,
+        ActionListener<NodesGatewayStartedShardsBatch> listener
+    ) {
+        execute(new Request(nodes, shardAttributesMap), listener);
+    }
+
+    @Override
+    protected NodeRequest newNodeRequest(Request request) {
+        return new NodeRequest(request);
+    }
+
+    @Override
+    protected NodeGatewayStartedShardsBatch newNodeResponse(StreamInput in) throws IOException {
+        return new NodeGatewayStartedShardsBatch(in);
+    }
+
+    @Override
+    protected NodesGatewayStartedShardsBatch newResponse(
+        Request request,
+        List<NodeGatewayStartedShardsBatch> responses,
+        List<FailedNodeException> failures
+    ) {
+        return new NodesGatewayStartedShardsBatch(clusterService.getClusterName(), responses, failures);
+    }
+
+    /**
+     * This method is similar to the nodeOperation method of {@link TransportNodesListGatewayStartedShards}; we loop over
+     * the shards here and populate the data about the shards held by the local node.
+     *
+     * @param request Request containing the map of shard IDs to their attributes.
+     * @return NodeGatewayStartedShardsBatch contains the data about the primary shards held by the local node
+     */
+    @Override
+    protected NodeGatewayStartedShardsBatch nodeOperation(NodeRequest request) {
+        Map<ShardId, NodeGatewayStartedShard> shardsOnNode = new HashMap<>();
+        for (ShardAttributes shardAttr : request.shardAttributes.values()) {
+            final ShardId shardId = shardAttr.getShardId();
+            try {
+                shardsOnNode.put(
+                    shardId,
+                    getShardInfoOnLocalNode(
+                        logger,
+                        shardId,
+                        namedXContentRegistry,
+                        nodeEnv,
+                        indicesService,
+                        shardAttr.getCustomDataPath(),
+                        settings,
+                        clusterService
+                    )
+                );
+            } catch (Exception e) {
+                shardsOnNode.put(
+                    shardId,
+                    new NodeGatewayStartedShard(null, false, null, new OpenSearchException("failed to load started shards", e))
+                );
+            }
+        }
+        return new NodeGatewayStartedShardsBatch(clusterService.localNode(), shardsOnNode);
+    }
+
+    /**
+     * This is used in constructing the request for making the transport request to a set of other nodes.
+     * Refer to the start method of the {@link TransportNodesAction} class.
+     *
+     * @opensearch.internal
+     */
+    public static class Request extends BaseNodesRequest<Request> {
+        private final Map<ShardId, ShardAttributes> shardAttributes;
+
+        public Request(StreamInput in) throws IOException {
+            super(in);
+            shardAttributes = in.readMap(ShardId::new, ShardAttributes::new);
+        }
+
+        public Request(DiscoveryNode[] nodes, Map<ShardId, ShardAttributes> shardAttributes) {
+            super(nodes);
+            this.shardAttributes = Objects.requireNonNull(shardAttributes);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeMap(shardAttributes, (o, k) -> k.writeTo(o), (o, v) -> v.writeTo(o));
+        }
+
+        public Map<ShardId, ShardAttributes> getShardAttributes() {
+            return shardAttributes;
+        }
+    }
+
+    /**
+     * Responses received from the set of other nodes are clubbed into this class and sent back to the caller
+     * of this transport request. Refer to {@link TransportNodesAction}.
+     *
+     * @opensearch.internal
+     */
+    public static class NodesGatewayStartedShardsBatch extends BaseNodesResponse<NodeGatewayStartedShardsBatch> {
+
+        public NodesGatewayStartedShardsBatch(StreamInput in) throws IOException {
+            super(in);
+        }
+
+        public NodesGatewayStartedShardsBatch(
+            ClusterName clusterName,
+            List<NodeGatewayStartedShardsBatch> nodes,
+            List<FailedNodeException> failures
+        ) {
+            super(clusterName, nodes, failures);
+        }
+
+        @Override
+        protected List<NodeGatewayStartedShardsBatch> readNodesFrom(StreamInput in) throws IOException {
+            return in.readList(NodeGatewayStartedShardsBatch::new);
+        }
+
+        @Override
+        protected void writeNodesTo(StreamOutput out, List<NodeGatewayStartedShardsBatch> nodes) throws IOException {
+            out.writeList(nodes);
+        }
+    }
+
+    /**
+     * NodeRequest class is for deserializing the request received by this node from another node for this transport action.
+     * This is used in {@link TransportNodesAction}.
+     *
+     * @opensearch.internal
+     */
+    public static class NodeRequest extends TransportRequest {
+        private final Map<ShardId, ShardAttributes> shardAttributes;
+
+        public NodeRequest(StreamInput in) throws IOException {
+            super(in);
+            shardAttributes = in.readMap(ShardId::new, ShardAttributes::new);
+        }
+
+        public NodeRequest(Request request) {
+            this.shardAttributes = Objects.requireNonNull(request.getShardAttributes());
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeMap(shardAttributes, (o, k) -> k.writeTo(o), (o, v) -> v.writeTo(o));
+        }
+    }
+
+    /**
+     * This class encapsulates the metadata about a started shard that needs to be persisted or sent between nodes.
+     * This is used in {@link NodeGatewayStartedShardsBatch} to construct the response for each node, instead of
+     * {@link TransportNodesListGatewayStartedShards.NodeGatewayStartedShards}, because we don't need to save an extra
+     * {@link DiscoveryNode} object like in {@link TransportNodesListGatewayStartedShards.NodeGatewayStartedShards},
+     * which reduces the memory footprint of its objects.
+     *
+     * @opensearch.internal
+     */
+    public static class NodeGatewayStartedShard {
+        private final String allocationId;
+        private final boolean primary;
+        private final Exception storeException;
+        private final ReplicationCheckpoint replicationCheckpoint;
+
+        public NodeGatewayStartedShard(StreamInput in) throws IOException {
+            allocationId = in.readOptionalString();
+            primary = in.readBoolean();
+            if (in.readBoolean()) {
+                storeException = in.readException();
+            } else {
+                storeException = null;
+            }
+            if (in.readBoolean()) {
+                replicationCheckpoint = new ReplicationCheckpoint(in);
+            } else {
+                replicationCheckpoint = null;
+            }
+        }
+
+        public NodeGatewayStartedShard(String allocationId, boolean primary, ReplicationCheckpoint replicationCheckpoint) {
+            this(allocationId, primary, replicationCheckpoint, null);
+        }
+
+        public NodeGatewayStartedShard(
+            String allocationId,
+            boolean primary,
+            ReplicationCheckpoint replicationCheckpoint,
+            Exception storeException
+        ) {
+            this.allocationId = allocationId;
+            this.primary = primary;
+            this.replicationCheckpoint = replicationCheckpoint;
+            this.storeException = storeException;
+        }
+
+        public String allocationId() {
+            return this.allocationId;
+        }
+
+        public boolean primary() {
+            return this.primary;
+        }
+
+        public ReplicationCheckpoint replicationCheckpoint() {
+            return this.replicationCheckpoint;
+        }
+
+        public Exception storeException() {
+            return this.storeException;
+        }
+
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeOptionalString(allocationId);
+            out.writeBoolean(primary);
+            if (storeException != null) {
+                out.writeBoolean(true);
+                out.writeException(storeException);
+            } else {
+                out.writeBoolean(false);
+            }
+            if (replicationCheckpoint != null) {
+                out.writeBoolean(true);
+                replicationCheckpoint.writeTo(out);
+            } else {
+                out.writeBoolean(false);
+            }
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (o == null || getClass() != o.getClass()) {
+                return false;
+            }
+
+            NodeGatewayStartedShard that = (NodeGatewayStartedShard) o;
+
+            return primary == that.primary
+                && Objects.equals(allocationId, that.allocationId)
+                && Objects.equals(storeException, that.storeException)
+                && Objects.equals(replicationCheckpoint, that.replicationCheckpoint);
+        }
+
+        @Override
+        public int hashCode() {
+            int result = (allocationId != null ? allocationId.hashCode() : 0);
+            result = 31 * result + (primary ? 1 : 0);
+            result = 31 * result + (storeException != null ? storeException.hashCode() : 0);
+            result = 31 * result + (replicationCheckpoint != null ? replicationCheckpoint.hashCode() : 0);
+            return result;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder buf = new StringBuilder();
+            buf.append("NodeGatewayStartedShard[").append("allocationId=").append(allocationId).append(",primary=").append(primary);
+            if (storeException != null) {
+                buf.append(",storeException=").append(storeException);
+            }
+            if (replicationCheckpoint != null) {
+                buf.append(",ReplicationCheckpoint=").append(replicationCheckpoint.toString());
+            }
+            buf.append("]");
+            return buf.toString();
+        }
+    }
+
+    /**
+     * This is the response from a single node; it is used in {@link NodesGatewayStartedShardsBatch} for creating
+     * the node to its response mapping for this transport request.
+     * Refer to the start method of {@link TransportNodesAction}.
+     *
+     * @opensearch.internal
+     */
+    public static class NodeGatewayStartedShardsBatch extends BaseNodeResponse {
+        private final Map<ShardId, NodeGatewayStartedShard> nodeGatewayStartedShardsBatch;
+
+        public Map<ShardId, NodeGatewayStartedShard> getNodeGatewayStartedShardsBatch() {
+            return nodeGatewayStartedShardsBatch;
+        }
+
+        public NodeGatewayStartedShardsBatch(StreamInput in) throws IOException {
+            super(in);
+            this.nodeGatewayStartedShardsBatch = in.readMap(ShardId::new, NodeGatewayStartedShard::new);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeMap(nodeGatewayStartedShardsBatch, (o, k) -> k.writeTo(o), (o, v) -> v.writeTo(o));
+        }
+
+        public NodeGatewayStartedShardsBatch(DiscoveryNode node, Map<ShardId, NodeGatewayStartedShard> nodeGatewayStartedShardsBatch) {
+            super(node);
+            this.nodeGatewayStartedShardsBatch = nodeGatewayStartedShardsBatch;
+        }
+    }
+}

From 57cc0dd65b88e0ddda494b3469538a09e5d55579 Mon Sep 17 00:00:00 2001
From: Varun Bansal <bansvaru@amazon.com>
Date: Fri, 2 Feb 2024 15:18:12 +0530
Subject: [PATCH 159/178] Create a clone of local segments size map used for Remote Segment Stats until sync to remote completes (#11896)

Signed-off-by: bansvaru <bansvaru@amazon.com>
---
 .../remote/RemoteSegmentTransferTracker.java  | 10 ++++--
 .../shard/RemoteStoreRefreshListener.java     | 34 +++++++++++++------
 2 files changed, 31 insertions(+), 13 deletions(-)

diff --git a/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java b/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java
index fe9440813b94f..92436a09a4e7e 100644
--- a/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java
+++ b/server/src/main/java/org/opensearch/index/remote/RemoteSegmentTransferTracker.java
@@ -301,12 +301,17 @@ public Map<String, Long> getLatestLocalFileNameLengthMap() {
     }
 
     /**
-     * Updates the latestLocalFileNameLengthMap by adding file name and it's size to the map. The method is given a function as an argument which is used for determining the file size (length in bytes). This method is also provided the collection of segment files which are the latest refresh local segment files. This method also removes the stale segment files from the map that are not part of the input segment files.
+     * Updates the latestLocalFileNameLengthMap by adding the file name and its size to the map.
+     * The method is given a function as an argument which is used for determining the file size (length in bytes).
+     * This method is also provided the collection of segment files which are the latest refreshed local segment files.
+     * This method also removes the stale segment files from the map that are not part of the input segment files.
      *
      * @param segmentFiles list of local refreshed segment files
      * @param fileSizeFunction function is used to determine the file size in bytes
+     *
+     * @return the updated map of local segment files and their sizes
      */
-    public void updateLatestLocalFileNameLengthMap(
+    public Map<String, Long> updateLatestLocalFileNameLengthMap(
         Collection<String> segmentFiles,
         CheckedFunction<String, Long, IOException> fileSizeFunction
     ) {
@@ -332,6 +337,7 @@ public void updateLatestLocalFileNameLengthMap(
         // Remove keys from the fileSizeMap that do not exist in the latest segment files
         latestLocalFileNameLengthMap.entrySet().removeIf(entry -> fileSet.contains(entry.getKey()) == false);
         computeBytesLag();
+        return Collections.unmodifiableMap(latestLocalFileNameLengthMap);
     }
 
     public void addToLatestUploadedFiles(String file) {
diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java
index e96a4bb19a960..f75e86540ed9b 100644
--- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java
+++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java
@@ -226,7 +226,9 @@ private boolean syncSegments() {
             Collection<String> localSegmentsPostRefresh = segmentInfos.files(true);
 
             // Create a map of file name to size and update the refresh segment tracker
-            updateLocalSizeMapAndTracker(localSegmentsPostRefresh);
+            Map<String, Long> localSegmentsSizeMap = updateLocalSizeMapAndTracker(localSegmentsPostRefresh).entrySet()
+                .stream()
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
             CountDownLatch latch = new CountDownLatch(1);
             ActionListener<Void> segmentUploadsCompletedListener = new LatchedActionListener<>(new ActionListener<>() {
                 @Override
@@ -242,6 +244,7 @@ public void onResponse(Void unused) {
                             refreshClockTimeMs,
                             refreshSeqNo,
                             lastRefreshedCheckpoint,
+                            localSegmentsSizeMap,
                             checkpoint
                         );
                         // At this point since we have uploaded new segments, segment infos and segment metadata file,
@@ -262,7 +265,7 @@ public void onFailure(Exception e) {
             }, latch);
 
             // Start the segments files upload
-            uploadNewSegments(localSegmentsPostRefresh, segmentUploadsCompletedListener);
+            uploadNewSegments(localSegmentsPostRefresh, localSegmentsSizeMap, segmentUploadsCompletedListener);
             latch.await();
         } catch (EngineException e) {
             logger.warn("Exception while reading SegmentInfosSnapshot", e);
@@ -306,10 +309,11 @@ private void onSuccessfulSegmentsSync(
         long refreshClockTimeMs,
         long refreshSeqNo,
         long lastRefreshedCheckpoint,
+        Map<String, Long> localFileSizeMap,
         ReplicationCheckpoint checkpoint
     ) {
         // Update latest uploaded segment files name in segment tracker
-        segmentTracker.setLatestUploadedFiles(segmentTracker.getLatestLocalFileNameLengthMap().keySet());
+        segmentTracker.setLatestUploadedFiles(localFileSizeMap.keySet());
         // Update the remote refresh time and refresh seq no
         updateRemoteRefreshTimeAndSeqNo(refreshTimeMs, refreshClockTimeMs, refreshSeqNo);
         // Reset the backoffDelayIterator for the future failures
@@ -382,7 +386,11 @@ void uploadMetadata(Collection<String> localSegmentsPostRefresh, SegmentInfos se
         }
     }
 
-    private void uploadNewSegments(Collection<String> localSegmentsPostRefresh, ActionListener<Void> listener) {
+    private void uploadNewSegments(
+        Collection<String> localSegmentsPostRefresh,
+        Map<String, Long> localSegmentsSizeMap,
+        ActionListener<Void> listener
+    ) {
         Collection<String> filteredFiles = localSegmentsPostRefresh.stream().filter(file -> !skipUpload(file)).collect(Collectors.toList());
         if (filteredFiles.size() == 0) {
             logger.debug("No new segments to upload in uploadNewSegments");
@@ -396,7 +404,7 @@ private void uploadNewSegments(Collection<String> localSegmentsPostRefresh, Acti
 
         for (String src : filteredFiles) {
             // Initializing listener here to ensure that the stats increment operations are thread-safe
-            UploadListener statsListener = createUploadListener();
+            UploadListener statsListener = createUploadListener(localSegmentsSizeMap);
             ActionListener<Void> aggregatedListener = ActionListener.wrap(resp -> {
                 statsListener.onSuccess(src);
                 batchUploadListener.onResponse(resp);
@@ -455,9 +463,11 @@ private void updateRemoteRefreshTimeAndSeqNo(long refreshTimeMs, long refreshClo
      * Updates map of file name to size of the input segment files in the segment tracker. Uses {@code storeDirectory.fileLength(file)} to get the size.
      *
      * @param segmentFiles list of segment files that are part of the most recent local refresh.
+     *
+     * @return the updated map of local segment files and their sizes
      */
-    private void updateLocalSizeMapAndTracker(Collection<String> segmentFiles) {
-        segmentTracker.updateLatestLocalFileNameLengthMap(segmentFiles, storeDirectory::fileLength);
+    private Map<String, Long> updateLocalSizeMapAndTracker(Collection<String> segmentFiles) {
+        return segmentTracker.updateLatestLocalFileNameLengthMap(segmentFiles, storeDirectory::fileLength);
     }
 
     private void updateFinalStatusInSegmentTracker(boolean uploadStatus, long bytesBeforeUpload, long startTimeInNS) {
@@ -532,22 +542,24 @@ private boolean isLocalOrSnapshotRecovery() {
 
     /**
     * Creates an {@link UploadListener} containing the stats population logic which would be triggered before and after segment upload events
+    *
+    * @param fileSizeMap snapshot of the current local segment files and their sizes
     */
-    private UploadListener createUploadListener() {
+    private UploadListener createUploadListener(Map<String, Long> fileSizeMap) {
         return new UploadListener() {
             private long uploadStartTime = 0;
 
             @Override
             public void beforeUpload(String file) {
                 // Start tracking the upload bytes started
-                segmentTracker.addUploadBytesStarted(segmentTracker.getLatestLocalFileNameLengthMap().get(file));
+                segmentTracker.addUploadBytesStarted(fileSizeMap.get(file));
                 uploadStartTime = System.currentTimeMillis();
             }
 
             @Override
             public void onSuccess(String file) {
                 // Track upload success
-                segmentTracker.addUploadBytesSucceeded(segmentTracker.getLatestLocalFileNameLengthMap().get(file));
+                segmentTracker.addUploadBytesSucceeded(fileSizeMap.get(file));
                 segmentTracker.addToLatestUploadedFiles(file);
                 segmentTracker.addUploadTimeInMillis(Math.max(1, System.currentTimeMillis() - uploadStartTime));
             }
 
@@ -555,7 +567,7 @@ public void onSuccess(String file) {
             @Override
             public void onFailure(String file) {
                 // Track upload failure
-                segmentTracker.addUploadBytesFailed(segmentTracker.getLatestLocalFileNameLengthMap().get(file));
+                segmentTracker.addUploadBytesFailed(fileSizeMap.get(file));
                 segmentTracker.addUploadTimeInMillis(Math.max(1, System.currentTimeMillis() - uploadStartTime));
             }
         };

From f55b9e0313815384f4f8483a3a9881acc7a5f100 Mon Sep 17 00:00:00 2001
From: Andrew Ross <andrross@amazon.com>
Date: Fri, 2 Feb 2024 09:33:45 -0800
Subject: [PATCH 160/178] Fix race condition in SimpleNioTransportTests.testTracerLog (#12139)

The test currently blocks on receiving a response to the request for the
"internal:testNotSeen" action. However, that response is sent from
TransportService before the trace logger [writes its log message][1].
Since the test was not polling for this "sent response" log message to
appear, it was possible for the test to remove/stop the mock log appender
concurrently with the logging of that final message. The fix is to include
this final log message as an expectation, so the test will poll until this
message appears and the logger should be quiescent when the appender is
removed and stopped.

[1]: https://github.com/opensearch-project/OpenSearch/blob/71f1fabe149fd0777edf44502ace4a8f0911feeb/server/src/main/java/org/opensearch/transport/TransportService.java#L1273

Signed-off-by: Andrew Ross <andrross@amazon.com>
---
 .../transport/AbstractSimpleTransportTestCase.java | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java
index 3b64e044e7bf0..e43b0756e2f2b 100644
--- a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java
@@ -1283,9 +1283,17 @@ public String executor() {
             Level.TRACE,
             notSeenReceived
         );
+        final String notSeenResponseSent = ".*\\[internal:testNotSeen].*sent response.*";
+        final MockLogAppender.LoggingExpectation notSeenResponseSentExpectation = new MockLogAppender.PatternSeenEventExpectation(
+            "sent response",
+            "org.opensearch.transport.TransportService.tracer",
+            Level.TRACE,
+            notSeenResponseSent
+        );
 
         appender.addExpectation(notSeenSentExpectation);
         appender.addExpectation(notSeenReceivedExpectation);
+        appender.addExpectation(notSeenResponseSentExpectation);
 
         PlainTransportFuture<StringMessageResponse> future = new PlainTransportFuture<>(noopResponseHandler);
         serviceA.sendRequest(nodeB, "internal:testNotSeen", new StringMessageRequest(""), future);

From 24b0cc92eceb3fa8ade8ce4c10268bd60fe6fd64 Mon Sep 17 00:00:00 2001
From: Michael Froh <froh@amazon.com>
Date: Fri, 2 Feb 2024 21:13:07 +0000
Subject: [PATCH 161/178] Allow composite aggregation under filter or reverse_nested parent (#11499)

* Allow composite aggregation under filter parent

Composite aggregations are able to run under a filter aggregation with no
change required (other than not throwing an exception). Also cleaned up
FilterAggregatorFactory a little.
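
As a rough illustration, a request of the kind this change unblocks can be
built like the Java sketch below (the "logs" index and the "status" and
"host" fields are hypothetical; the builders are the same ones exercised by
the new tests in this commit, and client() assumes an integration-test
context):

    // Composite aggregation nested under a filter parent; before this
    // change CompositeAggregationBuilder#doBuild rejected this shape.
    FilterAggregationBuilder filterAgg = new FilterAggregationBuilder(
        "errors_only",
        QueryBuilders.termQuery("status", "error")
    );
    filterAgg.subAggregation(
        new CompositeAggregationBuilder(
            "by_host",
            Collections.singletonList(new TermsValuesSourceBuilder("host").field("host"))
        )
    );
    client().prepareSearch("logs").setSize(0).addAggregation(filterAgg).get();

Previously the same request failed with "[composite] aggregation cannot be
used with a parent aggregation of type: [FilterAggregatorFactory]".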
Signed-off-by: Michael Froh <froh@amazon.com> * Add changelog entry Signed-off-by: Michael Froh <froh@amazon.com> * Add support for reverse nested agg too Signed-off-by: Michael Froh <froh@amazon.com> * Add unit test coverage Signed-off-by: Michael Froh <froh@amazon.com> * Skip new tests in pre-3.0 mixed cluster Signed-off-by: Michael Froh <froh@amazon.com> --------- Signed-off-by: Michael Froh <froh@amazon.com> --- CHANGELOG.md | 1 + .../test/search.aggregation/230_composite.yml | 128 ++++++++++++++++++ .../CompositeAggregationBuilder.java | 18 ++- .../filter/FilterAggregatorFactory.java | 6 +- .../composite/CompositeAggregatorTests.java | 40 ++++++ .../BaseCompositeAggregatorTestCase.java | 18 ++- 6 files changed, 196 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e7fcf5ee6b35c..b8a95c74d3c08 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -208,6 +208,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Change error message when per shard document limit is breached ([#11312](https://github.com/opensearch-project/OpenSearch/pull/11312)) - Improve boolean parsing performance ([#11308](https://github.com/opensearch-project/OpenSearch/pull/11308)) - Interpret byte array as primitive using VarHandles ([#11362](https://github.com/opensearch-project/OpenSearch/pull/11362)) +- Allow composite aggregation to run under a parent filter aggregation ([#11499](https://github.com/opensearch-project/OpenSearch/pull/11499)) - Automatically add scheme to discovery.ec2.endpoint ([#11512](https://github.com/opensearch-project/OpenSearch/pull/11512)) - Restore support for Java 8 for RestClient ([#11562](https://github.com/opensearch-project/OpenSearch/pull/11562)) - Add deleted doc count in _cat/shards ([#11678](https://github.com/opensearch-project/OpenSearch/pull/11678)) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index 09278690f5d05..2808be8cd7045 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -509,6 +509,134 @@ setup: - match: { aggregations.1.2.buckets.1.key.nested: 1000 } - match: { aggregations.1.2.buckets.1.doc_count: 1 } +--- +"Composite aggregation with filtered nested parent": + - skip: + version: " - 2.99.99" + reason: fixed in 3.0.0 + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + 1: + nested: + path: nested + aggs: + 2: + filter: + range: + nested.nested_long: + gt: 0 + lt: 100 + aggs: + 3: + composite: + sources: [ + "nested": { + "terms": { + "field": "nested.nested_long" + } + } + ] + + - match: {hits.total: 6} + - length: { aggregations.1.2.3.buckets: 2 } + - match: { aggregations.1.2.3.buckets.0.key.nested: 10 } + - match: { aggregations.1.2.3.buckets.0.doc_count: 2 } + - match: { aggregations.1.2.3.buckets.1.key.nested: 20 } + - match: { aggregations.1.2.3.buckets.1.doc_count: 2 } + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + 1: + nested: + path: nested + aggs: + 2: + filter: + range: + nested.nested_long: + gt: 0 + lt: 100 + aggs: + 3: + composite: + after: { "nested": 10 } + sources: [ + "nested": { + "terms": { + "field": "nested.nested_long" + } + } + ] + - match: {hits.total: 6} + - length: { aggregations.1.2.3.buckets: 1 } + - match: { 
aggregations.1.2.3.buckets.0.key.nested: 20 } + - match: { aggregations.1.2.3.buckets.0.doc_count: 2 } + +--- +"Composite aggregation with filtered reverse nested parent": + - skip: + version: " - 2.99.99" + reason: fixed in 3.0.0 + - do: + search: + rest_total_hits_as_int: true + index: test + body: + aggregations: + 1: + nested: + path: nested + aggs: + 2: + filter: + range: + nested.nested_long: + gt: 0 + lt: 20 + aggs: + 3: + reverse_nested: {} + aggs: + 4: + composite: + sources: [ + { + "long": { + "terms": { + "field": "long" + } + } + }, + { + "kw": { + "terms": { + "field": "keyword" + } + } + } + ] + - match: {hits.total: 6} + - length: { aggregations.1.2.3.4.buckets: 4 } + - match: { aggregations.1.2.3.4.buckets.0.key.long: 0 } + - match: { aggregations.1.2.3.4.buckets.0.key.kw: "bar" } + - match: { aggregations.1.2.3.4.buckets.0.doc_count: 1 } + - match: { aggregations.1.2.3.4.buckets.1.key.long: 10 } + - match: { aggregations.1.2.3.4.buckets.1.key.kw: "foo" } + - match: { aggregations.1.2.3.4.buckets.1.doc_count: 1 } + - match: { aggregations.1.2.3.4.buckets.2.key.long: 20 } + - match: { aggregations.1.2.3.4.buckets.2.key.kw: "foo" } + - match: { aggregations.1.2.3.4.buckets.2.doc_count: 1 } + - match: { aggregations.1.2.3.4.buckets.3.key.long: 100 } + - match: { aggregations.1.2.3.4.buckets.3.key.kw: "bar" } + - match: { aggregations.1.2.3.4.buckets.3.doc_count: 1 } + --- "Composite aggregation with unmapped field": - skip: diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java index 5e8791441d83a..e57acba5bc0ad 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregationBuilder.java @@ -44,7 +44,9 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregatorFactories; import org.opensearch.search.aggregations.AggregatorFactory; +import org.opensearch.search.aggregations.bucket.filter.FilterAggregatorFactory; import org.opensearch.search.aggregations.bucket.nested.NestedAggregatorFactory; +import org.opensearch.search.aggregations.bucket.nested.ReverseNestedAggregatorFactory; import org.opensearch.search.aggregations.support.ValuesSourceRegistry; import java.io.IOException; @@ -240,14 +242,16 @@ public BucketCardinality bucketCardinality() { * this aggregator or the instance of the parent's factory that is incompatible with * the composite aggregation. 
*/ - private AggregatorFactory checkParentIsNullOrNested(AggregatorFactory factory) { + private static AggregatorFactory checkParentIsSafe(AggregatorFactory factory) { if (factory == null) { return null; - } else if (factory instanceof NestedAggregatorFactory) { - return checkParentIsNullOrNested(factory.getParent()); - } else { - return factory; - } + } else if (factory instanceof NestedAggregatorFactory + || factory instanceof FilterAggregatorFactory + || factory instanceof ReverseNestedAggregatorFactory) { + return checkParentIsSafe(factory.getParent()); + } else { + return factory; + } } private static void validateSources(List<CompositeValuesSourceBuilder<?>> sources) { @@ -278,7 +282,7 @@ protected AggregatorFactory doBuild( AggregatorFactory parent, AggregatorFactories.Builder subfactoriesBuilder ) throws IOException { - AggregatorFactory invalid = checkParentIsNullOrNested(parent); + AggregatorFactory invalid = checkParentIsSafe(parent); if (invalid != null) { throw new IllegalArgumentException( "[composite] aggregation cannot be used with a parent aggregation of" diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java index a0a636c121e12..db21b384c77ea 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/filter/FilterAggregatorFactory.java @@ -56,7 +56,7 @@ public class FilterAggregatorFactory extends AggregatorFactory { private Weight weight; - private Query filter; + private final Query filter; public FilterAggregatorFactory( String name, @@ -85,7 +85,7 @@ public Weight getWeight() { try { weight = contextSearcher.createWeight(contextSearcher.rewrite(filter), ScoreMode.COMPLETE_NO_SCORES, 1f); } catch (IOException e) { - throw new AggregationInitializationException("Failed to initialse filter", e); + throw new AggregationInitializationException("Failed to initialise filter", e); } } return weight; @@ -98,7 +98,7 @@ public Aggregator createInternal( CardinalityUpperBound cardinality, Map<String, Object> metadata ) throws IOException { - return new FilterAggregator(name, () -> this.getWeight(), factories, searchContext, parent, cardinality, metadata); + return new FilterAggregator(name, this::getWeight, factories, searchContext, parent, cardinality, metadata); } @Override diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index b581e552fec4f..bbe27eb573b64 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -38,7 +38,10 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.TermQuery; import org.opensearch.OpenSearchParseException; +import org.opensearch.index.query.MatchAllQueryBuilder; +import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.bucket.filter.FilterAggregationBuilder; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import 
org.opensearch.search.aggregations.bucket.missing.MissingOrder; import org.opensearch.search.aggregations.bucket.terms.StringTerms; @@ -2460,4 +2463,41 @@ public void testIndexSortWithDuplicate() throws Exception { ); } } + + public void testUnderFilterAggregator() throws IOException { + executeTestCase(false, false, new MatchAllDocsQuery(), Collections.emptyList(), () -> { + FilterAggregationBuilder filterAggregatorBuilder = new FilterAggregationBuilder( + "filter_mcmilterface", + new MatchAllQueryBuilder() + ); + filterAggregatorBuilder.subAggregation( + new CompositeAggregationBuilder( + "compo", + Collections.singletonList(new TermsValuesSourceBuilder("keyword").field("keyword")) + ) + ); + return filterAggregatorBuilder; + }, (ic) -> {}); + } + + public void testUnderBucketAggregator() throws IOException { + try { + executeTestCase(false, false, new MatchAllDocsQuery(), Collections.emptyList(), () -> { + TermsAggregationBuilder termsAggregationBuilder = AggregationBuilders.terms("terms").field("keyword"); + termsAggregationBuilder.subAggregation( + new CompositeAggregationBuilder( + "compo", + Collections.singletonList(new TermsValuesSourceBuilder("keyword").field("keyword")) + ) + ); + return termsAggregationBuilder; + }, (ic) -> {}); + fail("Should have thrown an IllegalArgumentException"); + } catch (IllegalArgumentException iae) { + assertTrue( + iae.getMessage() + .contains("[composite] aggregation cannot be used with a parent aggregation of type: [TermsAggregatorFactory]") + ); + } + } } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java index 6b5ec838f401d..16abf2e255b5d 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/composite/BaseCompositeAggregatorTestCase.java @@ -46,7 +46,9 @@ import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.NumberFieldMapper; +import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.search.aggregations.AggregatorTestCase; +import org.opensearch.search.aggregations.InternalAggregation; import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.opensearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; import org.opensearch.search.aggregations.bucket.composite.InternalComposite; @@ -139,12 +141,16 @@ protected void executeTestCase( boolean useIndexSort, Query query, List<Map<String, List<Object>>> dataset, - Supplier<CompositeAggregationBuilder> create, + Supplier<? extends AggregationBuilder> create, Consumer<InternalComposite> verify ) throws IOException { Map<String, MappedFieldType> types = FIELD_TYPES.stream().collect(Collectors.toMap(MappedFieldType::name, Function.identity())); - CompositeAggregationBuilder aggregationBuilder = create.get(); - Sort indexSort = useIndexSort ? 
buildIndexSort(aggregationBuilder.sources(), types) : null; + AggregationBuilder aggregationBuilder = create.get(); + Sort indexSort = null; + if (aggregationBuilder instanceof CompositeAggregationBuilder && useIndexSort) { + CompositeAggregationBuilder cab = (CompositeAggregationBuilder) aggregationBuilder; + indexSort = buildIndexSort(cab.sources(), types); + } IndexSettings indexSettings = createIndexSettings(indexSort); try (Directory directory = newDirectory()) { IndexWriterConfig config = newIndexWriterConfig(random(), new MockAnalyzer(random())); @@ -180,14 +186,16 @@ protected void executeTestCase( } try (IndexReader indexReader = DirectoryReader.open(directory)) { IndexSearcher indexSearcher = new IndexSearcher(indexReader); - InternalComposite composite = searchAndReduce( + InternalAggregation aggregation = searchAndReduce( indexSettings, indexSearcher, query, aggregationBuilder, FIELD_TYPES.toArray(new MappedFieldType[0]) ); - verify.accept(composite); + if (aggregation instanceof InternalComposite) { + verify.accept((InternalComposite) aggregation); + } } } } From 26a66f0749164f3c988ea228ac3913eb9e81e9c3 Mon Sep 17 00:00:00 2001 From: Jay Deng <jayd0104@gmail.com> Date: Fri, 2 Feb 2024 14:10:27 -0800 Subject: [PATCH 162/178] Remove Concurrent Segment Search feature flags for feature launch (#12074) Signed-off-by: Jay Deng <jayd0104@gmail.com> --- CHANGELOG.md | 1 + distribution/src/config/opensearch.yml | 6 -- .../common/QueryStringWithAnalyzersIT.java | 6 -- .../common/HighlighterWithAnalyzersTests.java | 6 -- .../geo/GeoModulePluginIntegTestCase.java | 6 -- .../script/expression/MoreExpressionIT.java | 6 -- .../script/expression/StoredExpressionIT.java | 6 -- .../mustache/MultiSearchTemplateIT.java | 6 -- .../join/aggregations/ChildrenIT.java | 6 -- .../join/aggregations/ParentIT.java | 6 -- .../join/query/ChildQuerySearchIT.java | 6 -- .../opensearch/join/query/InnerHitsIT.java | 6 -- .../percolator/PercolatorQuerySearchIT.java | 6 -- .../index/rankeval/RankEvalRequestIT.java | 6 -- .../test/cat.shards/10_basic.yml | 10 ++- .../test/cat.thread_pool/10_basic.yml | 28 +++++++++ .../node/tasks/CancellableTasksIT.java | 6 -- .../node/tasks/ConcurrentSearchTasksIT.java | 4 +- .../org/opensearch/index/IndexSortIT.java | 6 -- .../index/search/MatchPhraseQueryIT.java | 6 -- .../index/suggest/stats/SuggestStatsIT.java | 6 -- .../indices/IndicesRequestCacheIT.java | 6 -- .../breaker/CircuitBreakerServiceIT.java | 6 -- .../indices/stats/IndexStatsIT.java | 6 -- .../org/opensearch/mget/SimpleMgetIT.java | 6 -- .../org/opensearch/script/ScriptCacheIT.java | 6 -- .../search/SearchCancellationIT.java | 6 -- .../opensearch/search/SearchTimeoutIT.java | 6 -- .../search/SearchWithRejectionsIT.java | 6 -- .../search/StressSearchServiceReaperIT.java | 6 -- .../AggregationsIntegrationIT.java | 6 -- .../search/aggregations/CombiIT.java | 6 -- .../search/aggregations/EquivalenceIT.java | 6 -- .../search/aggregations/MetadataIT.java | 6 -- .../search/aggregations/MissingValueIT.java | 6 -- .../bucket/AdjacencyMatrixIT.java | 6 -- .../aggregations/bucket/BooleanTermsIT.java | 6 -- .../aggregations/bucket/DateHistogramIT.java | 6 -- .../bucket/DateHistogramOffsetIT.java | 6 -- .../aggregations/bucket/DateRangeIT.java | 6 -- .../bucket/DiversifiedSamplerIT.java | 6 -- .../search/aggregations/bucket/FilterIT.java | 6 -- .../search/aggregations/bucket/FiltersIT.java | 6 -- .../aggregations/bucket/GeoDistanceIT.java | 6 -- .../search/aggregations/bucket/GlobalIT.java | 6 -- 
.../aggregations/bucket/HistogramIT.java | 6 -- .../search/aggregations/bucket/IpRangeIT.java | 6 -- .../aggregations/bucket/NaNSortingIT.java | 6 -- .../search/aggregations/bucket/NestedIT.java | 6 -- .../search/aggregations/bucket/RangeIT.java | 6 -- .../aggregations/bucket/ReverseNestedIT.java | 6 -- .../search/aggregations/bucket/SamplerIT.java | 6 -- .../aggregations/bucket/ShardReduceIT.java | 6 -- .../SignificantTermsSignificanceScoreIT.java | 6 -- .../bucket/TermsDocCountErrorIT.java | 6 -- .../bucket/TermsFixedDocCountErrorIT.java | 6 -- .../bucket/TermsShardMinDocCountIT.java | 6 -- .../aggregations/metrics/CardinalityIT.java | 6 -- .../CardinalityWithRequestBreakerIT.java | 6 -- .../metrics/ScriptedMetricIT.java | 6 -- .../aggregations/metrics/TopHitsIT.java | 6 -- .../aggregations/metrics/ValueCountIT.java | 6 -- .../aggregations/pipeline/AvgBucketIT.java | 6 -- .../aggregations/pipeline/BucketScriptIT.java | 6 -- .../pipeline/BucketSelectorIT.java | 6 -- .../aggregations/pipeline/BucketSortIT.java | 6 -- .../pipeline/DateDerivativeIT.java | 6 -- .../aggregations/pipeline/DerivativeIT.java | 6 -- .../pipeline/ExtendedStatsBucketIT.java | 6 -- .../aggregations/pipeline/MaxBucketIT.java | 6 -- .../aggregations/pipeline/MinBucketIT.java | 6 -- .../aggregations/pipeline/MovAvgIT.java | 6 -- .../pipeline/PercentilesBucketIT.java | 6 -- .../aggregations/pipeline/SerialDiffIT.java | 6 -- .../aggregations/pipeline/StatsBucketIT.java | 6 -- .../aggregations/pipeline/SumBucketIT.java | 6 -- .../backpressure/SearchBackpressureIT.java | 6 -- .../search/basic/SearchRedStateIndexIT.java | 6 -- .../basic/SearchWhileCreatingIndexIT.java | 6 -- .../search/basic/SearchWhileRelocatingIT.java | 6 -- .../basic/SearchWithRandomExceptionsIT.java | 6 -- .../basic/SearchWithRandomIOExceptionsIT.java | 6 -- .../basic/TransportSearchFailuresIT.java | 6 -- .../basic/TransportTwoNodesSearchIT.java | 6 -- .../search/fetch/FetchSubPhasePluginIT.java | 6 -- .../search/fetch/subphase/InnerHitsIT.java | 6 -- .../fetch/subphase/MatchedQueriesIT.java | 6 -- .../highlight/CustomHighlighterSearchIT.java | 6 -- .../highlight/HighlighterSearchIT.java | 6 -- .../search/fieldcaps/FieldCapabilitiesIT.java | 6 -- .../search/fields/SearchFieldsIT.java | 6 -- .../functionscore/DecayFunctionScoreIT.java | 6 -- .../functionscore/ExplainableScriptIT.java | 6 -- .../FunctionScoreFieldValueIT.java | 6 -- .../search/functionscore/FunctionScoreIT.java | 6 -- .../functionscore/FunctionScorePluginIT.java | 6 -- .../search/functionscore/QueryRescorerIT.java | 6 -- .../functionscore/RandomScoreFunctionIT.java | 6 -- .../opensearch/search/geo/GeoFilterIT.java | 6 -- .../opensearch/search/geo/GeoPolygonIT.java | 6 -- .../search/geo/GeoShapeIntegrationIT.java | 6 -- .../geo/LegacyGeoShapeIntegrationIT.java | 6 -- .../search/morelikethis/MoreLikeThisIT.java | 6 -- .../search/msearch/MultiSearchIT.java | 6 -- .../search/nested/SimpleNestedExplainIT.java | 6 -- .../search/nested/SimpleNestedIT.java | 6 -- .../opensearch/search/pit/PitMultiNodeIT.java | 6 -- .../search/preference/SearchPreferenceIT.java | 6 -- .../aggregation/AggregationProfilerIT.java | 6 -- .../search/profile/query/QueryProfilerIT.java | 6 -- .../org/opensearch/search/query/ExistsIT.java | 6 -- .../search/query/MultiMatchQueryIT.java | 6 -- .../search/query/QueryStringIT.java | 6 -- .../search/query/ScriptScoreQueryIT.java | 6 -- .../search/query/SearchQueryIT.java | 6 -- .../search/query/SimpleQueryStringIT.java | 6 -- .../scriptfilter/ScriptQuerySearchIT.java | 6 -- 
.../search/scroll/DuelScrollIT.java | 6 -- .../search/scroll/SearchScrollIT.java | 6 -- .../SearchScrollWithFailingNodesIT.java | 6 -- .../search/searchafter/SearchAfterIT.java | 6 -- .../search/simple/SimpleSearchIT.java | 6 -- .../search/slice/SearchSliceIT.java | 6 -- .../opensearch/search/sort/FieldSortIT.java | 6 -- .../opensearch/search/sort/GeoDistanceIT.java | 6 -- .../search/sort/GeoDistanceSortBuilderIT.java | 6 -- .../opensearch/search/sort/SimpleSortIT.java | 6 -- .../search/sort/SortFromPluginIT.java | 6 -- .../search/source/MetadataFetchingIT.java | 6 -- .../search/source/SourceFetchingIT.java | 6 -- .../search/stats/ConcurrentSearchStatsIT.java | 7 +-- .../search/stats/SearchStatsIT.java | 6 -- .../suggest/CompletionSuggestSearchIT.java | 6 -- .../ContextCompletionSuggestSearchIT.java | 6 -- .../search/suggest/SuggestSearchIT.java | 6 -- .../opensearch/similarity/SimilarityIT.java | 6 -- .../common/settings/ClusterSettings.java | 11 ++-- .../common/settings/FeatureFlagSettings.java | 1 - .../common/settings/IndexScopedSettings.java | 9 ++- .../opensearch/common/util/FeatureFlags.java | 12 ---- .../index/search/stats/SearchStats.java | 11 ++-- .../rest/action/cat/RestIndicesAction.java | 61 +++++++++---------- .../rest/action/cat/RestNodesAction.java | 45 ++++++-------- .../rest/action/cat/RestShardsAction.java | 45 ++++++-------- .../search/DefaultSearchContext.java | 5 +- .../org/opensearch/search/SearchModule.java | 3 +- .../org/opensearch/search/SearchService.java | 2 +- .../query/QueryPhaseSearcherWrapper.java | 12 +--- .../org/opensearch/threadpool/ThreadPool.java | 15 ++--- .../AbstractTermVectorsTestCase.java | 6 -- .../common/settings/SettingsModuleTests.java | 56 +++-------------- .../action/cat/RestShardsActionTests.java | 8 +-- .../search/DefaultSearchContextTests.java | 4 -- .../opensearch/search/SearchModuleTests.java | 19 +----- .../opensearch/search/SearchServiceTests.java | 8 +-- .../bucket/ShardSizeTestCase.java | 6 -- .../metrics/AbstractGeoTestCase.java | 6 -- .../DelayedShardAggregationIT.java | 6 -- .../bucket/AbstractTermsTestCase.java | 6 -- .../metrics/AbstractNumericTestCase.java | 6 -- .../test/OpenSearchIntegTestCase.java | 8 +-- .../test/OpenSearchSingleNodeTestCase.java | 8 +-- ...edDynamicSettingsOpenSearchIntegTests.java | 6 -- ...zedStaticSettingsOpenSearchIntegTests.java | 6 -- ...edDynamicSettingsOpenSearchIntegTests.java | 6 -- ...zedStaticSettingsOpenSearchIntegTests.java | 6 -- ...edDynamicSettingsOpenSearchIntegTests.java | 6 -- ...zedStaticSettingsOpenSearchIntegTests.java | 6 -- 168 files changed, 150 insertions(+), 1101 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b8a95c74d3c08..263ec57e856cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -136,6 +136,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([11876](https://github.com/opensearch-project/OpenSearch/pull/11876)) - New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465)) - Add support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394)) +- Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 
([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/distribution/src/config/opensearch.yml b/distribution/src/config/opensearch.yml index b7ab2e1c2309b..ebffdde0f3699 100644 --- a/distribution/src/config/opensearch.yml +++ b/distribution/src/config/opensearch.yml @@ -117,12 +117,6 @@ ${path.logs} #opensearch.experimental.feature.extensions.enabled: false # # -# Gates the concurrent segment search feature. This feature enables concurrent segment search in a separate -# index searcher threadpool. -# -#opensearch.experimental.feature.concurrent_segment_search.enabled: false -# -# # Gates the optimization of datetime formatters caching along with change in default datetime formatter # Once there is no observed impact on performance, this feature flag can be removed. # diff --git a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java index 3f9e15005fabb..648536f9136a8 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/opensearch/analysis/common/QueryStringWithAnalyzersIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.Operator; import org.opensearch.plugins.Plugin; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -63,11 +62,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(CommonAnalysisModulePlugin.class); diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java index 3e93060eaaa90..e55c1c69b2e40 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; @@ -82,11 +81,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(CommonAnalysisModulePlugin.class); diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java index 65fadd211050d..6afd5c4ca75c1 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java @@ -11,7 +11,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.geometry.utils.StandardValidator; import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.index.mapper.GeoShapeFieldMapper; @@ -47,11 +46,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /** * Returns a collection of plugins that should be loaded on each node for doing the integration tests. As this * geo plugin is not getting packaged in a zip, we need to load it before the tests run. diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java index 6a1d9d57dd435..8e15488900e5f 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/MoreExpressionIT.java @@ -41,7 +41,6 @@ import org.opensearch.action.update.UpdateRequestBuilder; import org.opensearch.common.lucene.search.function.CombineFunction; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryBuilders; @@ -94,11 +93,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(ExpressionModulePlugin.class); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java index f5cd115e43df5..7465fa1e5ddbe 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/opensearch/script/expression/StoredExpressionIT.java @@ -35,7 +35,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.plugins.Plugin; @@ -68,11 +67,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)); diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java index 204be6ccf1a2c..f7abc220e75d8 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/opensearch/script/mustache/MultiSearchTemplateIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexNotFoundException; import org.opensearch.plugins.Plugin; import org.opensearch.script.ScriptType; @@ -72,11 +71,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(MustacheModulePlugin.class); diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java index 5fc0a202ae45e..ab74463382aaa 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ChildrenIT.java @@ -41,7 +41,6 @@ import org.opensearch.client.Requests; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.SearchHit; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.aggregations.InternalAggregation; @@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testChildrenAggs() throws Exception { SearchResponse searchResponse = client().prepareSearch("test") .setQuery(matchQuery("randomized", true)) diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java index 04703a65aa19d..4a6157e388777 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/aggregations/ParentIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregation; import org.opensearch.search.aggregations.bucket.MultiBucketsAggregation; import org.opensearch.search.aggregations.bucket.terms.Terms; @@ -76,11 +75,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimpleParentAgg() throws Exception { final SearchRequestBuilder searchRequest = client().prepareSearch("test") .setSize(10000) diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java index c43d6352b26f8..99527c3273c4b 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java @@ -44,7 +44,6 @@ import org.opensearch.common.lucene.search.function.FunctionScoreQuery; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.rest.RestStatus; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.IdsQueryBuilder; @@ -118,11 +117,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected 
Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testMultiLevelChild() throws Exception { assertAcked( prepareCreate("test").setMapping( diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java index 39da86c7fd726..4b5470d17c100 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/InnerHitsIT.java @@ -40,7 +40,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexSettings; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.InnerHitBuilder; @@ -105,11 +104,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { ArrayList<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins()); diff --git a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java index 3f5b7e9defe30..01436404e8a85 100644 --- a/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/internalClusterTest/java/org/opensearch/percolator/PercolatorQuerySearchIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.DistanceUnit; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.bytes.BytesArray; @@ -104,11 +103,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected boolean addMockGeoShapeFieldMapper() { return false; diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java index b5cf9476b2068..488c2e33648e7 100644 --- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.support.IndicesOptions; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.MatchAllQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -80,11 +79,6 @@ public static 
Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(RankEvalModulePlugin.class); diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml index b572ed9e62ea9..91521029e2d31 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml @@ -1,13 +1,13 @@ "Help": - skip: - version: " - 2.11.99" - reason: deleted docs added in 2.12.0 + version: " - 2.99.99" + reason: deleted docs and concurrent search stats are added in 2.12.0 features: node_selector - do: cat.shards: help: true node_selector: - version: "2.12.0 - " + version: "3.0.0 - " - match: $body: | @@ -66,6 +66,10 @@ search.query_current .+ \n search.query_time .+ \n search.query_total .+ \n + search.concurrent_query_current .+ \n + search.concurrent_query_time .+ \n + search.concurrent_query_total .+ \n + search.concurrent_avg_slice_count .+ \n search.scroll_current .+ \n search.scroll_time .+ \n search.scroll_total .+ \n diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml index 00ec838489f63..1b8ffbf762b8f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml @@ -11,6 +11,33 @@ / #node_name name active queue rejected ^ (\S+ \s+ \S+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ + - do: + cat.thread_pool: + thread_pool_patterns: search,search_throttled,generic + h: name,total_wait_time,twt + v: true + + - match: + $body: | + /^ name \s+ total_wait_time \s+ twt \n + (generic \s+ -1 \s+ -1 \n + search \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n + search_throttled \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n)+ $/ + +--- +"Test cat thread_pool total_wait_time output with concurrent search thread_pool": + - skip: + version: " - 2.99.99" + reason: index_searcher thread_pool was introduced in V_2.12.0 + + - do: + cat.thread_pool: {} + + - match: + $body: | + / #node_name name active queue rejected + ^ (\S+ \s+ \S+ \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ + - do: cat.thread_pool: thread_pool_patterns: search,search_throttled,index_searcher,generic @@ -21,6 +48,7 @@ $body: | /^ name \s+ total_wait_time \s+ twt \n (generic \s+ -1 \s+ -1 \n + index_searcher \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n search \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n search_throttled \s+ \d*\.*\d*\D+ \s+ \d*\.*\d*\D+ \n)+ $/ diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java index 6a769c6c49d18..bdb36b62ada21 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/CancellableTasksIT.java @@ -52,7 +52,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.inject.Inject; import org.opensearch.common.settings.Settings; -import
org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.AbstractRunnable; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.common.util.set.Sets; @@ -119,11 +118,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Before public void resetTestStates() { idGenerator = 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java index e6fd9139d45f2..8b3c40c43e2d2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/ConcurrentSearchTasksIT.java @@ -15,9 +15,9 @@ import org.opensearch.common.settings.FeatureFlagSettings; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.tasks.resourcetracker.ThreadResourceInfo; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.search.SearchService; import org.opensearch.tasks.TaskInfo; import org.hamcrest.MatcherAssert; @@ -44,6 +44,7 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(super.nodeSettings(nodeOrdinal)) .put("thread_pool.index_searcher.size", INDEX_SEARCHER_THREADS) .put("thread_pool.index_searcher.queue_size", INDEX_SEARCHER_THREADS) + .put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true) .build(); } @@ -66,7 +67,6 @@ protected Settings featureFlagSettings() { for (Setting builtInFlag : FeatureFlagSettings.BUILT_IN_FEATURE_FLAGS) { featureSettings.put(builtInFlag.getKey(), builtInFlag.getDefaultRaw(Settings.EMPTY)); } - featureSettings.put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true); return featureSettings.build(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java index fcf2852a4c447..369c9f9b1a653 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/IndexSortIT.java @@ -39,7 +39,6 @@ import org.apache.lucene.search.SortedNumericSortField; import org.apache.lucene.search.SortedSetSortField; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private static XContentBuilder createTestMapping() { try { return jsonBuilder().startObject() diff --git a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java index 0c57c442be096..df423edeca9c1 100644 --- 
a/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/search/MatchPhraseQueryIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.MatchPhraseQueryBuilder; import org.opensearch.index.search.MatchQuery.ZeroTermsQuery; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Before public void setUp() throws Exception { super.setUp(); diff --git a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java index 5db97fa92bbeb..a1ff2da249d69 100644 --- a/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/index/suggest/stats/SuggestStatsIT.java @@ -44,7 +44,6 @@ import org.opensearch.cluster.routing.ShardIterator; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.search.stats.SearchStats; import org.opensearch.search.suggest.SuggestBuilder; import org.opensearch.search.suggest.phrase.PhraseSuggestionBuilder; @@ -82,11 +81,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfReplicas() { return 0; diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java index 3862d5b2ab4ed..95f6a7a03266c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/IndicesRequestCacheIT.java @@ -42,7 +42,6 @@ import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.cache.request.RequestCacheStats; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.bucket.global.GlobalAggregationBuilder; @@ -82,11 +81,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - // One of the primary purposes of the query cache is to cache aggs results public void testCacheAggs() throws Exception { Client client = client(); diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java index 
eef1582303fa6..73e888eea362c 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/memory/breaker/CircuitBreakerServiceIT.java @@ -50,7 +50,6 @@ import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.breaker.CircuitBreaker; import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.breaker.NoopCircuitBreaker; @@ -107,11 +106,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings(int nodeOrdinal) { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java index 6db88cdee3576..21ab6e8a4f018 100644 --- a/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/indices/stats/IndexStatsIT.java @@ -57,7 +57,6 @@ import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.action.support.DefaultShardOperationFailedException; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.io.stream.StreamOutput; @@ -135,11 +134,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(InternalSettingsPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java index d3a105f787a90..2f0d4959d217b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/mget/SimpleMgetIT.java @@ -42,7 +42,6 @@ import org.opensearch.action.get.MultiGetResponse; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.search.fetch.subphase.FetchSourceContext; @@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testThatMgetShouldWorkWithOneIndexMissing() throws IOException { createIndex("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java index 05263b1936584..42c257eb79eff 100644 --- a/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/script/ScriptCacheIT.java @@ -12,7 +12,6 @@ import org.opensearch.OpenSearchException; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.index.MockEngineFactoryPlugin; @@ -51,11 +50,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder() .put(super.nodeSettings(nodeOrdinal)) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java index 3d4645cfb3694..5a19e2b841c08 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchCancellationIT.java @@ -50,7 +50,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; import org.opensearch.core.tasks.TaskCancelledException; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -109,11 +108,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(ScriptedBlockPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java index 64ce8e75857fa..ef7da395d2151 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchTimeoutIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; import org.opensearch.script.Script; @@ -72,11 +71,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(ScriptedTimeoutPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java index 013a0eaa557ac..b7f71b00d802f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/SearchWithRejectionsIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchType; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java index 49c934c4f855a..7ed3526cabe3f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/StressSearchServiceReaperIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings(int nodeOrdinal) { // very frequent checks diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java index b7f2e2ac57336..6059abce53c8b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/AggregationsIntegrationIT.java @@ -40,7 +40,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; import org.opensearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.SignificantTermsAggregationBuilder; @@ -83,11 +82,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { assertAcked(prepareCreate("index").setMapping("f", "type=keyword").get()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java index 87328c6e11c65..1826dd69cd804 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/CombiIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.bucket.missing.Missing; @@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /** * Making sure that if there are multiple aggregations, working on the same field, yet require different * value source type, they can all still work. 
It used to fail as we used to cache the ValueSource by the diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java index 9fd9a99e15018..302ec3116d187 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/EquivalenceIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.IndicesOptions; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.query.RangeQueryBuilder; @@ -108,11 +107,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java index 8c7e2cb006d28..b650855083eed 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MetadataIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.metrics.Sum; import org.opensearch.search.aggregations.pipeline.InternalBucketMetricValue; @@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testMetadataSetOnAggregationResult() throws Exception { assertAcked(client().admin().indices().prepareCreate("idx").setMapping("name", "type=keyword").get()); IndexRequestBuilder[] builders = new IndexRequestBuilder[randomInt(30)]; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java index 28a3a2b3c520f..bdd16c7e74dc0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/MissingValueIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.histogram.Histogram; import org.opensearch.search.aggregations.bucket.terms.Terms; @@ -79,11 +78,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings 
featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int maximumNumberOfShards() { return 2; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java index 4d97925a769dd..557ec9a37978d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/AdjacencyMatrixIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexSettings; import org.opensearch.index.query.BoolQueryBuilder; @@ -92,11 +91,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java index 270240de1fb47..9a1efb3336212 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/BooleanTermsIT.java @@ -36,7 +36,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.test.OpenSearchIntegTestCase; @@ -72,11 +71,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index a6434136644ef..64c9c792b866a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; import org.opensearch.common.time.DateMathParser; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.index.query.MatchNoneQueryBuilder; import org.opensearch.index.query.QueryBuilders; @@ -118,11 +117,6 @@ public static Collection<Object[]> parameters() { ); } - 
@Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private ZonedDateTime date(String date) { return DateFormatters.from(DateFieldMapper.getDefaultDateTimeFormatter().parse(date)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java index dab6683ceb324..eea896e01afe1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -38,7 +38,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.mapper.DateFieldMapper; import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval; import org.opensearch.search.aggregations.bucket.histogram.Histogram; @@ -86,11 +85,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private ZonedDateTime date(String date) { return DateFormatters.from(DateFieldMapper.getDefaultDateTimeFormatter().parse(date)); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java index 392253626089f..f00b601a54b80 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateRangeIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; @@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private static IndexRequestBuilder indexDoc(int month, int day, int value) throws Exception { return client().prepareIndex("idx") .setSource( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java index 7046c7e149f80..b62e5f0f7f3b0 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DiversifiedSamplerIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.action.support.WriteRequest; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import 
org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; @@ -87,11 +86,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public String randomExecutionHint() { return randomBoolean() ? null : randomFrom(SamplerAggregator.ExecutionMode.values()).toString(); } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java index 86c3051173a2d..2863711d49580 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FilterIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -84,11 +83,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java index e0b1fb47823c1..e64877a1d4030 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/FiltersIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.query.BoolQueryBuilder; import org.opensearch.index.query.QueryBuilder; @@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java index 8bf20d8fec34f..ed0bd3aad5bab 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GeoDistanceIT.java @@ -41,7 +41,6 @@ import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.DistanceUnit; -import 
org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.InternalAggregation; @@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception { XContentBuilder source = jsonBuilder().startObject().field("city", name); source.startArray("location"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java index 395ff8fe1bc4b..a4aea6096a6e4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/GlobalIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.aggregations.InternalAggregation; import org.opensearch.search.aggregations.bucket.global.Global; @@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java index fbe1002b3a76a..4abd068d6fe37 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/HistogramIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; import org.opensearch.plugins.Plugin; import org.opensearch.script.MockScriptPlugin; @@ -114,11 +113,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java
index 969830e822968..44789ea63f536 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/IpRangeIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.script.MockScriptPlugin;
 import org.opensearch.script.Script;
@@ -74,11 +73,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public static class DummyScriptPlugin extends MockScriptPlugin {
         @Override
         public Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java
index 1c627530d195a..3eb813dcb91ef 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NaNSortingIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.util.Comparators;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.search.aggregations.Aggregation;
 import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode;
@@ -151,11 +150,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("string_value", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java
index cd64bbf978786..288d4d2c4e525 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/NestedIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.rest.RestStatus;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.XContentBuilder;
@@ -110,11 +109,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {

diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java
index 20a0345dba127..50cee4e9ecd92 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/RangeIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.script.Script;
@@ -98,11 +97,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java
index b0258b035f83c..3bf9233d3441d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ReverseNestedIT.java
@@ -36,7 +36,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.opensearch.search.aggregations.BucketOrder;
@@ -90,11 +89,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java
index 692b45b16065b..3decab92acbff 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SamplerIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchType;
 import org.opensearch.action.support.WriteRequest;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.TermQueryBuilder;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.sampler.Sampler;
@@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java
index 4b0fb436a0cf2..4cab6deb08bb5 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/ShardReduceIT.java
@@ -36,7 +36,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.geometry.utils.Geohash;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode;
@@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private IndexRequestBuilder indexDoc(String date, int value) throws Exception {
         return client().prepareIndex("idx")
             .setSource(
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java
index e11bffaf66eb1..f2e9265fa5cf9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/SignificantTermsSignificanceScoreIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.ToXContent;
@@ -113,11 +112,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(TestScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
index 7431734b8d5f5..add6b71cb1753 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsDocCountErrorIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.terms.Terms;
@@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping(STRING_FIELD_NAME, "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java
index 9f59167fe2d42..422af15d2881d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsFixedDocCountErrorIT.java
@@ -14,7 +14,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.bucket.terms.Terms;
 import org.opensearch.test.OpenSearchIntegTestCase;
 import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
@@ -46,11 +45,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testSimpleAggErrorMultiShard() throws Exception {
         // size = 1, shard_size = 2
         // Shard_1 [A, A, A, A, B, B, C, C, D, D] -> Buckets {"A" : 4, "B" : 2}
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java
index 48be94ba9b7cc..1cc250c00dba9 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/TermsShardMinDocCountIT.java
@@ -36,7 +36,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.aggregations.BucketOrder;
@@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private static String randomExecutionHint() {
         return randomBoolean() ? null : randomFrom(SignificantTermsAggregatorFactory.ExecutionMode.values()).toString();
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java
index 977ce3df8b3d1..db4ee3571d141 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.script.MockScriptPlugin;
@@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
index e00fd7516789c..8122304ba992c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/CardinalityWithRequestBreakerIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.OpenSearchException;
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.breaker.CircuitBreakingException;
 import org.opensearch.indices.breaker.HierarchyCircuitBreakerService;
 import org.opensearch.search.aggregations.Aggregator;
@@ -68,11 +67,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     /**
      * Test that searches using cardinality aggregations returns all request breaker memory.
      */
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java
index a097268cd1d67..1725aa7847d72 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ScriptedMetricIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.support.XContentMapValues;
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
@@ -109,11 +108,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java
index 59a0aea4dae34..5d84452998e40 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java
@@ -42,7 +42,6 @@
 import org.opensearch.action.search.SearchType;
 import org.opensearch.common.document.DocumentField;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.IndexSettings;
 import org.opensearch.index.query.MatchAllQueryBuilder;
@@ -128,11 +127,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java
index 4c23f2e7666fa..4610281c4b8a8 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/ValueCountIT.java
@@ -35,7 +35,6 @@

 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.script.Script;
 import org.opensearch.script.ScriptType;
@@ -87,11 +86,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         createIndex("idx");
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java
index 77008afdf1a0a..48fd06bac285b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/AvgBucketIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java
index 5262a35d9d135..1b22cf2018d96 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketScriptIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
@@ -102,11 +101,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java
index f221349d7ec82..7dca1d0d79b1e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSelectorIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.bytes.BytesArray;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.XContentBuilder;
@@ -101,11 +100,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java
index f1f29bfb9541d..ffb607866935b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/BucketSortIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
@@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         createIndex(INDEX, INDEX_WITH_GAPS);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java
index e77245e6391a1..8c89c1232ebb3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DateDerivativeIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
 import org.opensearch.common.time.DateFormatters;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.InternalAggregation;
 import org.opensearch.search.aggregations.InternalMultiBucketAggregation;
 import org.opensearch.search.aggregations.bucket.histogram.DateHistogramInterval;
@@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private ZonedDateTime date(int month, int day) {
         return ZonedDateTime.of(2012, month, day, 0, 0, 0, 0, ZoneOffset.UTC);
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java
index 14b56787e6e7c..f8def40ec003a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/DerivativeIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.search.aggregations.InternalAggregation;
@@ -112,11 +111,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         createIndex("idx");
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
index b1a59525ab900..1bd04cc13268f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -92,11 +91,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java
index eb9397cfbbe98..ea6fcbd6a1560 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MaxBucketIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.action.support.WriteRequest;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentHelper;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
@@ -103,11 +102,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java
index 0a00d59ea76b4..44d12436382f6 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MinBucketIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java
index a4f8e16e13fc3..d35b80b7918fe 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/MovAvgIT.java
@@ -43,7 +43,6 @@
 import org.opensearch.client.Client;
 import org.opensearch.common.collect.EvictingQueue;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -145,11 +144,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         prepareCreate("idx").setMapping(
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java
index 079b7a1d12252..29cb334bfcd00 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/PercentilesBucketIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.terms.IncludeExclude;
@@ -92,11 +91,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java
index 765709940611b..507bff51f0e39 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SerialDiffIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.collect.EvictingQueue;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
 import org.opensearch.search.aggregations.support.ValuesSourceAggregationBuilder;
@@ -110,11 +109,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private ValuesSourceAggregationBuilder<? extends ValuesSourceAggregationBuilder<?>> randomMetric(String name, String field) {
         int rand = randomIntBetween(0, 3);

diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java
index d4c9b654ad4e3..fbaf799871c8a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/StatsBucketIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java
index 9f8fa83f3d4fe..a5967124ff921 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/pipeline/SumBucketIT.java
@@ -37,7 +37,6 @@
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.aggregations.BucketOrder;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram;
 import org.opensearch.search.aggregations.bucket.histogram.Histogram.Bucket;
@@ -89,11 +88,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     public void setupSuiteScopeCluster() throws Exception {
         assertAcked(client().admin().indices().prepareCreate("idx").setMapping("tag", "type=keyword").get());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java
index 74f39d6c626e3..fb84134120e00 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/backpressure/SearchBackpressureIT.java
@@ -21,7 +21,6 @@
 import org.opensearch.common.inject.Inject;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.action.ActionListener;
 import org.opensearch.core.action.ActionResponse;
 import org.opensearch.core.common.io.stream.StreamInput;
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         final List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java
index 1ef9930ede86d..ad1ce0582cfb3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchRedStateIndexIT.java
@@ -42,7 +42,6 @@
 import org.opensearch.cluster.routing.ShardRouting;
 import org.opensearch.cluster.routing.ShardRoutingState;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.rest.RestStatus;
 import org.opensearch.search.SearchService;
 import org.opensearch.test.OpenSearchIntegTestCase;
@@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testAllowPartialsWithRedState() throws Exception {
         final int numShards = cluster().numDataNodes() + 2;
         buildRedIndex(numShards);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java
index 51a40e51f8964..681f7081fa2dc 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileCreatingIndexIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.client.Client;
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

@@ -68,11 +67,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testIndexCausesIndexCreation() throws Exception {
         searchWhileCreatingIndex(false, 1); // 1 replica in our default...
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java
index 2564fb893978b..f7b8b0df7dca7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWhileRelocatingIT.java
@@ -40,7 +40,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.Priority;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.SearchHits;
 import org.opensearch.test.OpenSearchIntegTestCase;
 import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
@@ -74,11 +73,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testSearchAndRelocateConcurrentlyRandomReplicas() throws Exception {
         testSearchAndRelocateConcurrently(randomIntBetween(0, 1));
     }
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
index 3ad29dfc8dfef..614ec2ebd634a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomExceptionsIT.java
@@ -49,7 +49,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.Settings.Builder;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.index.MockEngineFactoryPlugin;
 import org.opensearch.index.query.QueryBuilders;
@@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(RandomExceptionDirectoryReaderWrapper.TestPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
index 9b03f4384c8f6..b45b334fc1d1c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/SearchWithRandomIOExceptionsIT.java
@@ -45,7 +45,6 @@
 import org.opensearch.client.Requests;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.Plugin;
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(MockFSIndexStore.TestPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
index 325fa170e0761..0e337822ba0e7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportSearchFailuresIT.java
@@ -44,7 +44,6 @@
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.Priority;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.MatchQueryBuilder;
 import org.opensearch.search.builder.SearchSourceBuilder;
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected int maximumNumberOfReplicas() {
         return 1;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
index 70f135690143f..a82b6f12755ca 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/basic/TransportTwoNodesSearchIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.client.Requests;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.MatchQueryBuilder;
 import org.opensearch.index.query.QueryBuilders;
@@ -94,11 +93,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected int numberOfReplicas() {
         return 0;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
index 4ae4ad81def4a..13b4abb58b4df 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/FetchSubPhasePluginIT.java
@@ -42,7 +42,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.document.DocumentField;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.ParsingException;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
@@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singletonList(FetchTermVectorsPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
index 266316cdff776..b743c00bf4549 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/InnerHitsIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.IndexSettings;
 import org.opensearch.index.query.BoolQueryBuilder;
@@ -102,11 +101,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
index d10d62e34bb50..7a828c06c5cd7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java
@@ -36,7 +36,6 @@

 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.XContentHelper;
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testSimpleMatchedQueryFromFilteredQuery() throws Exception {
         createIndex("test");
         ensureGreen();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
index b10bb90538881..66cbf36137551 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/CustomHighlighterSearchIT.java
@@ -35,7 +35,6 @@

 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.test.OpenSearchIntegTestCase.ClusterScope;
@@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(CustomHighlighterPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 3c81f8990128c..5bfc556bb629e 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -49,7 +49,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.Settings.Builder;
 import org.opensearch.common.time.DateFormatter;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.rest.RestStatus;
 import org.opensearch.core.xcontent.XContentBuilder;
@@ -145,11 +144,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class, MockAnalysisPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
index 99d5a1f8facce..4d398f8ca09cc 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fieldcaps/FieldCapabilitiesIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.fieldcaps.FieldCapabilitiesResponse;
 import org.opensearch.action.index.IndexRequestBuilder;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilders;
@@ -73,11 +72,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Before
     public void setUp() throws Exception {
         super.setUp();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
index beb5679ae9b74..906d45ef84b3f 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java
@@ -43,7 +43,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
 import org.opensearch.common.time.DateUtils;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.common.xcontent.support.XContentMapValues;
 import org.opensearch.core.common.bytes.BytesArray;
@@ -118,11 +117,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(InternalSettingsPlugin.class, CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
index c13338c297ad4..0380b3c7ddb89 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/DecayFunctionScoreIT.java
@@ -46,7 +46,6 @@
 import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
 import org.opensearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilder;
 import org.opensearch.index.query.QueryBuilders;
@@ -105,11 +104,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected boolean forbidPrivateIndexSettings() {
         return false;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
index 2890b4afa7129..0573dcfc4863d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/ExplainableScriptIT.java
@@ -43,7 +43,6 @@
 import org.opensearch.common.lucene.search.function.CombineFunction;
 import org.opensearch.common.lucene.search.function.Functions;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.plugins.ScriptPlugin;
@@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public static class ExplainableScriptPlugin extends Plugin implements ScriptPlugin {
         @Override
         public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java
index 7f7dfba541f6d..6956833cf6d62 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreFieldValueIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.lucene.search.function.FieldValueFactorFunction;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.SearchHit;
 import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;

@@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testFieldValueFactor() throws IOException, InterruptedException {
         assertAcked(
             prepareCreate("test").setMapping(
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java
index c846e40c9cf41..4f267f0059291 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScoreIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.common.lucene.search.function.CombineFunction;
 import org.opensearch.common.lucene.search.function.FunctionScoreQuery;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.index.query.MatchAllQueryBuilder;
 import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder.FilterFunctionBuilder;
@@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java
index 3ced0b457c44c..593f844305743 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/FunctionScorePluginIT.java
@@ -40,7 +40,6 @@
 import org.opensearch.common.Priority;
 import org.opensearch.common.action.ActionFuture;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.bytes.BytesReference;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.index.query.functionscore.DecayFunction;
@@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(CustomDistanceScorePlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java
index 8286d089e3668..6c4ea0cdeb1f1 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java
@@ -43,7 +43,6 @@
 import org.opensearch.common.lucene.search.function.CombineFunction;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.settings.Settings.Builder;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.Operator;
@@ -105,11 +104,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testEnforceWindowSize() throws InterruptedException {
         createIndex("test");
         // this
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java
index 0b18e46050e2d..f1205ba0f1e93 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/RandomScoreFunctionIT.java
@@ -36,7 +36,6 @@
 import org.apache.lucene.util.ArrayUtil;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.index.mapper.SeqNoFieldMapper;
 import org.opensearch.index.query.functionscore.FunctionScoreQueryBuilder;
@@ -90,11 +89,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
index cfea8fc2af033..701ff0a94baf2 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoFilterIT.java
@@ -56,7 +56,6 @@
 import org.opensearch.common.geo.builders.PointBuilder;
 import org.opensearch.common.geo.builders.PolygonBuilder;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.util.io.Streams;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.common.bytes.BytesReference;
@@ -113,11 +112,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected boolean forbidPrivateIndexSettings() {
         return false;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java
index ccafddd84b605..2010a288427b3 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoPolygonIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.geo.GeoPoint;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.search.SearchHit;
 import org.opensearch.test.OpenSearchIntegTestCase;
 import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
@@ -74,11 +73,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected boolean forbidPrivateIndexSettings() {
         return false;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
index 3a2c5113797f9..6dbffa019382d 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/GeoShapeIntegrationIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.common.geo.builders.PointBuilder;
 import org.opensearch.common.geo.builders.ShapeBuilder;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.index.IndexService;
@@ -75,11 +74,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Settings nodeSettings(int nodeOrdinal) {
         return Settings.builder()
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
index 7e7472f16a78b..e9115cf7dfbce 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/geo/LegacyGeoShapeIntegrationIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.cluster.routing.IndexShardRoutingTable;
 import org.opensearch.common.geo.builders.ShapeBuilder;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
 import org.opensearch.core.xcontent.ToXContent;
@@ -77,11 +76,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     /**
      * Test that orientation parameter correctly persists across cluster restart
     */
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
index 0ea47ebb9ea26..36fc5de0a5cf7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/morelikethis/MoreLikeThisIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.cluster.health.ClusterHealthStatus;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.MoreLikeThisQueryBuilder;
@@ -93,11 +92,6 @@ public static Collection<Object[]> parameters() {
         );
     }

-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<?
extends Plugin>> nodePlugins() { return Collections.singleton(InternalSettingsPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java index 0cf217ca2ee4d..9f49b7a27cda4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/msearch/MultiSearchIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.query.QueryBuilders; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimpleMultiSearch() throws InterruptedException { createIndex("test"); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java index 71f82d7c0b412..a6554271a0bc5 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedExplainIT.java @@ -12,7 +12,6 @@ import org.apache.lucene.search.join.ScoreMode; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; import static org.opensearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; @@ -31,11 +30,6 @@ */ public class SimpleNestedExplainIT extends OpenSearchIntegTestCase { - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /* * Tests the explain output for multiple docs. 
Concurrent search with multiple slices is tested * here as call to indexRandomForMultipleSlices is made and compared with explain output for diff --git a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java index 3755e3d07cae4..19e38da1aed05 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/nested/SimpleNestedIT.java @@ -47,7 +47,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.cluster.health.ClusterHealthStatus; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSimpleNested() throws Exception { assertAcked(prepareCreate("test").setMapping("nested1", "type=nested")); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java b/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java index 05e4e18916535..8bea5ef97fbba 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/pit/PitMultiNodeIT.java @@ -32,7 +32,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.action.ActionListener; import org.opensearch.search.builder.PointInTimeBuilder; import org.opensearch.test.InternalTestCluster; @@ -80,11 +79,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Before public void setupIndex() throws ExecutionException, InterruptedException { createIndex("index", Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 0).build()); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java index 8e0fc319192ad..bc9eeb528b031 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/preference/SearchPreferenceIT.java @@ -44,7 +44,6 @@ import org.opensearch.cluster.routing.OperationRouting; import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.Strings; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -84,11 +83,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return 
Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public Settings nodeSettings(int nodeOrdinal) { return Settings.builder() diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java index 7cd0623ba48fd..2f608a0cbe06f 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/aggregation/AggregationProfilerIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.BucketOrder; import org.opensearch.search.aggregations.InternalAggregation; @@ -178,11 +177,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfShards() { return 1; diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java index 199a8d5f0896f..412a94aaf1b3e 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java @@ -42,7 +42,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; @@ -88,11 +87,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - /** * This test simply checks to make sure nothing crashes. 
Test indexes 100-150 documents, * constructs 20-100 random queries and tries to profile them diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java index d3a7f9fbb6423..b95542382e5fc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ExistsIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -76,11 +75,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - // TODO: move this to a unit test somewhere... public void testEmptyIndex() throws Exception { createIndex("test"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java index e925e24e1d297..392f8b036b7a2 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/MultiMatchQueryIT.java @@ -39,7 +39,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.Fuzziness; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.set.Sets; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; @@ -106,11 +105,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Collections.singleton(MockKeywordPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java index e1d1e95f919e7..c43a9c23661ea 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/QueryStringIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.mapper.MapperService; @@ -86,11 +85,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @BeforeClass public static void createRandomClusterSetting() { CLUSTER_MAX_CLAUSE_COUNT = randomIntBetween(50, 100); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java index bb09f96eee7f3..136ddce152f63 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/ScriptScoreQueryIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.RangeQueryBuilder; @@ -81,11 +80,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Collections.singleton(CustomScriptPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index 574ca618bd4a3..a58db51780826 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -51,7 +51,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.unit.Fuzziness; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.MediaTypeRegistry; @@ -161,11 +160,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class, MockAnalysisPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java index aef58fc679946..31678d3f018a1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SimpleQueryStringIT.java @@ -43,7 +43,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.XContentBuilder; @@ -111,11 +110,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @BeforeClass public static void createRandomClusterSetting() { CLUSTER_MAX_CLAUSE_COUNT = randomIntBetween(60, 100); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java index faf1e5f24c5b4..7dbc61a3ced39 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -38,7 +38,6 @@ import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.IndexModule; @@ -84,11 +83,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(CustomScriptPlugin.class, InternalSettingsPlugin.class); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java index e39469546dd0b..55b3cfeef7419 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/DuelScrollIT.java @@ -40,7 +40,6 @@ import org.opensearch.action.search.SearchType; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; import org.opensearch.search.SearchHits; @@ -73,11 +72,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testDuelQueryThenFetch() throws Exception { TestContext context = create(SearchType.DFS_QUERY_THEN_FETCH, SearchType.QUERY_THEN_FETCH); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java index c4a14193cbdce..35b5a7949b20b 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollIT.java @@ -45,7 +45,6 @@ import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentHelper; import org.opensearch.core.common.bytes.BytesReference; @@ -105,11 +104,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @After public void cleanup() throws Exception { assertAcked( diff --git a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java index 382a3e4605b70..38f65c8c2d0da 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/scroll/SearchScrollWithFailingNodesIT.java @@ -39,7 +39,6 @@ import org.opensearch.cluster.routing.allocation.decider.ShardsLimitAllocationDecider; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfShards() { return 2; diff --git 
a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java index 5b848ec38eece..13c510ff21338 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/searchafter/SearchAfterIT.java @@ -47,7 +47,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.SearchHit; import org.opensearch.search.builder.PointInTimeBuilder; @@ -85,11 +84,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testsShouldFail() throws Exception { assertAcked(client().admin().indices().prepareCreate("test").setMapping("field1", "type=long", "field2", "type=keyword").get()); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java index be666dbc304b5..0606bc1dbbdb6 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/simple/SimpleSearchIT.java @@ -40,7 +40,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest.RefreshPolicy; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.json.JsonXContent; import org.opensearch.core.rest.RestStatus; @@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSearchNullIndex() { expectThrows( NullPointerException.class, diff --git a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java index 1d14ab40882fb..ea73f9ee1a2be 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/slice/SearchSliceIT.java @@ -45,7 +45,6 @@ import org.opensearch.common.action.ActionFuture; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.search.Scroll; @@ -83,11 +82,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - private void setupIndex(int numDocs, int numberOfShards) throws IOException, ExecutionException, InterruptedException { String mapping = XContentFactory.jsonBuilder() 
             .startObject()
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java
index 6145c3b8d86ab..20458a4876dc7 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/FieldSortIT.java
@@ -47,7 +47,6 @@
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.Numbers;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.rest.RestStatus;
 import org.opensearch.core.xcontent.MediaTypeRegistry;
@@ -122,11 +121,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public static class CustomScriptPlugin extends MockScriptPlugin {
         @Override
         protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java
index d68cbe2fe4458..492ffce3321e4 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceIT.java
@@ -39,7 +39,6 @@
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.geo.GeoDistance;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.common.xcontent.json.JsonXContent;
 import org.opensearch.core.xcontent.XContentBuilder;
@@ -80,11 +79,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected boolean forbidPrivateIndexSettings() {
         return false;
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java
index 29ecab82e7a83..b6f53936d5939 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/GeoDistanceSortBuilderIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.common.geo.GeoPoint;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.DistanceUnit;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.query.GeoValidationMethod;
 import org.opensearch.search.builder.SearchSourceBuilder;
@@ -78,11 +77,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private static final String LOCATION_FIELD = "location";
 
     @Override
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
index 3d8d6ed291366..cb8b508c4496b 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SimpleSortIT.java
@@ -40,7 +40,6 @@
 import org.opensearch.common.geo.GeoPoint;
 import org.opensearch.common.geo.GeoUtils;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.script.MockScriptPlugin;
@@ -91,11 +90,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(CustomScriptPlugin.class, InternalSettingsPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java
index 051d7bc888d83..ec891045cb510 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/sort/SortFromPluginIT.java
@@ -12,7 +12,6 @@
 
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.json.JsonXContent;
 import org.opensearch.plugins.Plugin;
 import org.opensearch.search.builder.SearchSourceBuilder;
@@ -40,11 +39,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Arrays.asList(CustomSortPlugin.class, InternalSettingsPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java
index 7ac2c2c5e4015..4c1e47ef8da99 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/source/MetadataFetchingIT.java
@@ -38,7 +38,6 @@
 import org.opensearch.action.search.SearchPhaseExecutionException;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.InnerHitBuilder;
 import org.opensearch.index.query.NestedQueryBuilder;
 import org.opensearch.index.query.TermQueryBuilder;
@@ -71,11 +70,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testSimple() throws InterruptedException {
         assertAcked(prepareCreate("test"));
         ensureGreen();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java
index 465bf154bbb9f..294657cedcc5c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/source/SourceFetchingIT.java
@@ -36,7 +36,6 @@
 
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
@@ -61,11 +60,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testSourceDefaultBehavior() throws InterruptedException {
         createIndex("test");
         ensureGreen();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java
index f770bd9864850..f8d2955440bc4 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/ConcurrentSearchStatsIT.java
@@ -16,7 +16,6 @@
 import org.opensearch.action.admin.indices.stats.IndicesStatsResponse;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.IndexModule;
 import org.opensearch.index.IndexSettings;
 import org.opensearch.indices.IndicesQueryCache;
@@ -61,6 +60,7 @@ protected Settings nodeSettings(int nodeOrdinal) {
             .put(IndicesService.INDICES_CACHE_CLEAN_INTERVAL_SETTING.getKey(), "1ms")
             .put(IndicesQueryCache.INDICES_QUERIES_CACHE_ALL_SEGMENTS_SETTING.getKey(), true)
             .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, SEGMENT_SLICE_COUNT)
+            .put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true)
             .build();
     }
 
@@ -74,11 +74,6 @@ public Settings indexSettings() {
             .build();
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testConcurrentQueryCount() throws Exception {
         String INDEX_1 = "test-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT);
         String INDEX_2 = "test-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java
index 06de4bd1f85e0..99cb3a4e8ca20 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/stats/SearchStatsIT.java
@@ -45,7 +45,6 @@
 import org.opensearch.cluster.routing.ShardRouting;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.query.QueryBuilders;
 import org.opensearch.index.search.stats.SearchStats.Stats;
 import org.opensearch.plugins.Plugin;
@@ -95,11 +94,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singleton(CustomScriptPlugin.class);
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
index fa4ce2a0870ec..c72e128a88045 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/CompletionSuggestSearchIT.java
@@ -48,7 +48,6 @@
 import org.opensearch.common.FieldMemoryStats;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.Fuzziness;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.mapper.MapperParsingException;
@@ -113,11 +112,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
     private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
     private final CompletionMappingBuilder completionMappingBuilder = new CompletionMappingBuilder();
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java
index 0a922a657986c..67523e9fd424a 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/ContextCompletionSuggestSearchIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.common.geo.GeoPoint;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.unit.Fuzziness;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.rest.RestStatus;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.core.xcontent.XContentBuilder;
@@ -86,11 +85,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     private final String INDEX = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
     private final String FIELD = RandomStrings.randomAsciiOfLength(random(), 10).toLowerCase(Locale.ROOT);
 
diff --git a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
index a8c0f8a45fef8..e0afdbc816f5c 100644
--- a/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/search/suggest/SuggestSearchIT.java
@@ -41,7 +41,6 @@
 import org.opensearch.action.search.SearchRequestBuilder;
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.common.xcontent.XContentFactory;
 import org.opensearch.core.xcontent.XContentBuilder;
 import org.opensearch.index.IndexSettings;
@@ -109,11 +108,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     // see #3196
     public void testSuggestAcrossMultipleIndices() throws IOException {
         assertAcked(prepareCreate("test").setMapping("text", "type=text"));
diff --git a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java
index c42fa55887966..b89541c647580 100644
--- a/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/similarity/SimilarityIT.java
@@ -36,7 +36,6 @@
 
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase;
 
 import java.util.Arrays;
@@ -62,11 +61,6 @@ public static Collection<Object[]> parameters() {
         );
     }
 
-    @Override
-    protected Settings featureFlagSettings() {
-        return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build();
-    }
-
     public void testCustomBM25Similarity() throws Exception {
         try {
             client().admin().indices().prepareDelete("test").execute().actionGet();
diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java
index 277286ae1ff1b..14b915935062c 100644
--- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java
+++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java
@@ -703,7 +703,11 @@ public void apply(Settings value, Settings current, Settings previous) {
                 CpuBasedAdmissionControllerSettings.CPU_BASED_ADMISSION_CONTROLLER_TRANSPORT_LAYER_MODE,
                 CpuBasedAdmissionControllerSettings.INDEXING_CPU_USAGE_LIMIT,
                 CpuBasedAdmissionControllerSettings.SEARCH_CPU_USAGE_LIMIT,
-                IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING
+                IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING,
+
+                // Concurrent segment search settings
+                SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING,
+                SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING
             )
         )
     );
@@ -716,11 +720,6 @@ public void apply(Settings value, Settings current, Settings previous) {
     * setting should be moved to {@link #BUILT_IN_CLUSTER_SETTINGS}.
     */
    public static final Map<List<String>, List<Setting>> FEATURE_FLAGGED_CLUSTER_SETTINGS = Map.of(
-        List.of(FeatureFlags.CONCURRENT_SEGMENT_SEARCH),
-        List.of(
-            SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING,
-            SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING
-        ),
         List.of(FeatureFlags.TELEMETRY),
         List.of(
             TelemetrySettings.TRACER_ENABLED_SETTING,
diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java
index e19f8e8370d5b..44dc4161f093a 100644
--- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java
+++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java
@@ -32,7 +32,6 @@ protected FeatureFlagSettings(
     public static final Set<Setting<?>> BUILT_IN_FEATURE_FLAGS = Set.of(
         FeatureFlags.EXTENSIONS_SETTING,
         FeatureFlags.IDENTITY_SETTING,
-        FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING,
         FeatureFlags.TELEMETRY_SETTING,
         FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING,
         FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING
diff --git a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java
index b34a2aaffe408..97d6a0ddf02c8 100644
--- a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java
+++ b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java
@@ -41,7 +41,6 @@
 import org.opensearch.common.annotation.PublicApi;
 import org.opensearch.common.logging.Loggers;
 import org.opensearch.common.settings.Setting.Property;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.index.IndexModule;
 import org.opensearch.index.IndexSettings;
 import org.opensearch.index.IndexSortConfig;
@@ -230,6 +229,9 @@ public final class IndexScopedSettings extends AbstractScopedSettings {
             IndexMetadata.INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING,
             IndexMetadata.INDEX_REMOTE_TRANSLOG_REPOSITORY_SETTING,
 
+            // Settings for concurrent segment search
+            IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING,
+
             // validate that built-in similarities don't get redefined
             Setting.groupSetting("index.similarity.", (s) -> {
                 Map<String, Settings> groups = s.getAsGroups();
@@ -252,10 +254,7 @@
      * is ready for production release, the feature flag can be removed, and the
      * setting should be moved to {@link #BUILT_IN_INDEX_SETTINGS}.
      */
-    public static final Map<String, List<Setting>> FEATURE_FLAGGED_INDEX_SETTINGS = Map.of(
-        FeatureFlags.CONCURRENT_SEGMENT_SEARCH,
-        List.of(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING)
-    );
+    public static final Map<String, List<Setting>> FEATURE_FLAGGED_INDEX_SETTINGS = Map.of();
 
     public static final IndexScopedSettings DEFAULT_SCOPED_SETTINGS = new IndexScopedSettings(Settings.EMPTY, BUILT_IN_INDEX_SETTINGS);
diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java
index d4ab161527cc0..c88a795501ca6 100644
--- a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java
+++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java
@@ -38,12 +38,6 @@ public class FeatureFlags {
      */
     public static final String IDENTITY = "opensearch.experimental.feature.identity.enabled";
 
-    /**
-     * Gates the functionality of concurrently searching the segments
-     * Once the feature is ready for release, this feature flag can be removed.
-     */
-    public static final String CONCURRENT_SEGMENT_SEARCH = "opensearch.experimental.feature.concurrent_segment_search.enabled";
-
     /**
      * Gates the functionality of telemetry framework.
      */
@@ -105,12 +99,6 @@ public static boolean isEnabled(Setting<Boolean> featureFlag) {
 
     public static final Setting<Boolean> TELEMETRY_SETTING = Setting.boolSetting(TELEMETRY, false, Property.NodeScope);
 
-    public static final Setting<Boolean> CONCURRENT_SEGMENT_SEARCH_SETTING = Setting.boolSetting(
-        CONCURRENT_SEGMENT_SEARCH,
-        false,
-        Property.NodeScope
-    );
-
     public static final Setting<Boolean> DATETIME_FORMATTER_CACHING_SETTING = Setting.boolSetting(
         DATETIME_FORMATTER_CACHING,
         true,
diff --git a/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java b/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java
index b13f1eb04a941..576e00f8f30d1 100644
--- a/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java
+++ b/server/src/main/java/org/opensearch/index/search/stats/SearchStats.java
@@ -38,7 +38,6 @@
 import org.opensearch.common.Nullable;
 import org.opensearch.common.annotation.PublicApi;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.core.common.io.stream.StreamOutput;
@@ -466,12 +465,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             builder.humanReadableField(Fields.QUERY_TIME_IN_MILLIS, Fields.QUERY_TIME, getQueryTime());
             builder.field(Fields.QUERY_CURRENT, queryCurrent);
 
-            if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) {
-                builder.field(Fields.CONCURRENT_QUERY_TOTAL, concurrentQueryCount);
-                builder.humanReadableField(Fields.CONCURRENT_QUERY_TIME_IN_MILLIS, Fields.CONCURRENT_QUERY_TIME, getConcurrentQueryTime());
-                builder.field(Fields.CONCURRENT_QUERY_CURRENT, concurrentQueryCurrent);
-                builder.field(Fields.CONCURRENT_AVG_SLICE_COUNT, getConcurrentAvgSliceCount());
-            }
+            builder.field(Fields.CONCURRENT_QUERY_TOTAL, concurrentQueryCount);
+            builder.humanReadableField(Fields.CONCURRENT_QUERY_TIME_IN_MILLIS, Fields.CONCURRENT_QUERY_TIME, getConcurrentQueryTime());
+            builder.field(Fields.CONCURRENT_QUERY_CURRENT, concurrentQueryCurrent);
+            builder.field(Fields.CONCURRENT_AVG_SLICE_COUNT, getConcurrentAvgSliceCount());
 
             builder.field(Fields.FETCH_TOTAL, fetchCount);
             builder.humanReadableField(Fields.FETCH_TIME_IN_MILLIS, Fields.FETCH_TIME, getFetchTime());
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java
index 23cc1cb507072..9dc711f804144 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestIndicesAction.java
@@ -54,7 +54,6 @@
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.action.ActionListener;
 import org.opensearch.core.action.ActionResponse;
 import org.opensearch.core.common.Strings;
@@ -582,31 +581,29 @@ protected Table getTableWithHeader(final RestRequest request) {
             "sibling:pri;alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops"
         );
         table.addCell("pri.search.query_total", "default:false;text-align:right;desc:total query phase ops");
 
-        if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) {
-            table.addCell(
-                "search.concurrent_query_current",
-                "sibling:pri;alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops"
-            );
-            table.addCell("pri.search.concurrent_query_current", "default:false;text-align:right;desc:current concurrent query phase ops");
+        table.addCell(
+            "search.concurrent_query_current",
+            "sibling:pri;alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops"
+        );
+        table.addCell("pri.search.concurrent_query_current", "default:false;text-align:right;desc:current concurrent query phase ops");
 
-            table.addCell(
-                "search.concurrent_query_time",
-                "sibling:pri;alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase"
-            );
-            table.addCell("pri.search.concurrent_query_time", "default:false;text-align:right;desc:time spent in concurrent query phase");
+        table.addCell(
+            "search.concurrent_query_time",
+            "sibling:pri;alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase"
+        );
+        table.addCell("pri.search.concurrent_query_time", "default:false;text-align:right;desc:time spent in concurrent query phase");
 
-            table.addCell(
-                "search.concurrent_query_total",
-                "sibling:pri;alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total query phase ops"
-            );
-            table.addCell("pri.search.concurrent_query_total", "default:false;text-align:right;desc:total query phase ops");
+        table.addCell(
+            "search.concurrent_query_total",
+            "sibling:pri;alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total query phase ops"
+        );
+        table.addCell("pri.search.concurrent_query_total", "default:false;text-align:right;desc:total query phase ops");
 
-            table.addCell(
-                "search.concurrent_avg_slice_count",
-                "sibling:pri;alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency"
-            );
-            table.addCell("pri.search.concurrent_avg_slice_count", "default:false;text-align:right;desc:average query concurrency");
-        }
+        table.addCell(
+            "search.concurrent_avg_slice_count",
+            "sibling:pri;alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency"
+        );
+        table.addCell("pri.search.concurrent_avg_slice_count", "default:false;text-align:right;desc:average query concurrency");
 
         table.addCell(
             "search.scroll_current",
@@ -916,19 +913,17 @@ Table buildTable(
         table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getQueryCount());
         table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getQueryCount());
 
-        if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) {
-            table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCurrent());
-            table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCurrent());
+        table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCurrent());
+        table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCurrent());
 
-            table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryTime());
-            table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryTime());
+        table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryTime());
+        table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryTime());
 
-            table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCount());
-            table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCount());
+        table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentQueryCount());
+        table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentQueryCount());
 
-            table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentAvgSliceCount());
-            table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentAvgSliceCount());
-        }
+        table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getConcurrentAvgSliceCount());
+        table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getConcurrentAvgSliceCount());
 
         table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getScrollCurrent());
         table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getScrollCurrent());
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
index dd3e0ba836557..e11012a23fce7 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestNodesAction.java
@@ -47,7 +47,6 @@
 import org.opensearch.common.Table;
 import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.network.NetworkAddress;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.Strings;
 import org.opensearch.core.common.transport.TransportAddress;
 import org.opensearch.core.common.unit.ByteSizeValue;
@@ -304,24 +303,22 @@ protected Table getTableWithHeader(final RestRequest request) {
         table.addCell("search.query_current", "alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase ops");
         table.addCell("search.query_time", "alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase");
         table.addCell("search.query_total", "alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops");
-        if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) {
-            table.addCell(
-                "search.concurrent_query_current",
-                "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops"
-            );
-            table.addCell(
-                "search.concurrent_query_time",
-                "alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase"
-            );
-            table.addCell(
-                "search.concurrent_query_total",
-                "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops"
-            );
-            table.addCell(
-                "search.concurrent_avg_slice_count",
-                "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency"
-            );
-        }
+        table.addCell(
+            "search.concurrent_query_current",
+            "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops"
+        );
+        table.addCell(
+            "search.concurrent_query_time",
+            "alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase"
+        );
+        table.addCell(
+            "search.concurrent_query_total",
+            "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops"
+        );
+        table.addCell(
+            "search.concurrent_avg_slice_count",
+            "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency"
+        );
         table.addCell("search.scroll_current", "alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll contexts");
         table.addCell(
             "search.scroll_time",
@@ -548,12 +545,10 @@ Table buildTable(
         table.addCell(searchStats == null ? null : searchStats.getTotal().getQueryCurrent());
         table.addCell(searchStats == null ? null : searchStats.getTotal().getQueryTime());
         table.addCell(searchStats == null ? null : searchStats.getTotal().getQueryCount());
-        if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) {
-            table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCurrent());
-            table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryTime());
-            table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCount());
-            table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentAvgSliceCount());
-        }
+        table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCurrent());
+        table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryTime());
+        table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentQueryCount());
+        table.addCell(searchStats == null ? null : searchStats.getTotal().getConcurrentAvgSliceCount());
         table.addCell(searchStats == null ? null : searchStats.getTotal().getScrollCurrent());
         table.addCell(searchStats == null ? null : searchStats.getTotal().getScrollTime());
         table.addCell(searchStats == null ? null : searchStats.getTotal().getScrollCount());
diff --git a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java
index d0d00e4c4596a..4cd10c6874e0a 100644
--- a/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/cat/RestShardsAction.java
@@ -44,7 +44,6 @@
 import org.opensearch.common.Table;
 import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.unit.TimeValue;
-import org.opensearch.common.util.FeatureFlags;
 import org.opensearch.core.common.Strings;
 import org.opensearch.index.cache.query.QueryCacheStats;
 import org.opensearch.index.engine.CommitStats;
@@ -220,24 +219,22 @@ protected Table getTableWithHeader(final RestRequest request) {
         table.addCell("search.query_current", "alias:sqc,searchQueryCurrent;default:false;text-align:right;desc:current query phase ops");
         table.addCell("search.query_time", "alias:sqti,searchQueryTime;default:false;text-align:right;desc:time spent in query phase");
         table.addCell("search.query_total", "alias:sqto,searchQueryTotal;default:false;text-align:right;desc:total query phase ops");
-        if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) {
-            table.addCell(
-                "search.concurrent_query_current",
-                "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops"
-            );
-            table.addCell(
-                "search.concurrent_query_time",
-                "alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase"
-            );
-            table.addCell(
-                "search.concurrent_query_total",
-                "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops"
-            );
-            table.addCell(
-                "search.concurrent_avg_slice_count",
-                "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency"
-            );
-        }
+        table.addCell(
+            "search.concurrent_query_current",
+            "alias:scqc,searchConcurrentQueryCurrent;default:false;text-align:right;desc:current concurrent query phase ops"
+        );
+        table.addCell(
+            "search.concurrent_query_time",
+            "alias:scqti,searchConcurrentQueryTime;default:false;text-align:right;desc:time spent in concurrent query phase"
+        );
+        table.addCell(
+            "search.concurrent_query_total",
+            "alias:scqto,searchConcurrentQueryTotal;default:false;text-align:right;desc:total concurrent query phase ops"
+        );
+        table.addCell(
+            "search.concurrent_avg_slice_count",
+            "alias:casc,searchConcurrentAvgSliceCount;default:false;text-align:right;desc:average query concurrency"
+        );
         table.addCell("search.scroll_current", "alias:scc,searchScrollCurrent;default:false;text-align:right;desc:open scroll
contexts"); table.addCell( "search.scroll_time", @@ -419,13 +416,11 @@ Table buildTable(RestRequest request, ClusterStateResponse state, IndicesStatsRe table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getQueryCurrent())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getQueryTime())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getQueryCount())); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCurrent())); - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryTime())); - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCount())); - table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentAvgSliceCount())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCurrent())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryTime())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentQueryCount())); + table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getConcurrentAvgSliceCount())); - } table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getScrollCurrent())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getScrollTime())); table.addCell(getOrNull(commonStats, CommonStats::getSearch, i -> i.getTotal().getScrollCount())); diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index 960b46d68977b..63e32dcf8f0b7 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -50,7 +50,6 @@ import org.opensearch.common.lucene.search.Queries; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; import org.opensearch.index.IndexService; import org.opensearch.index.IndexSettings; @@ -950,9 +949,7 @@ public BucketCollectorProcessor bucketCollectorProcessor() { * false: otherwise */ private boolean evaluateConcurrentSegmentSearchSettings(Executor concurrentSearchExecutor) { - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH) - && (clusterService != null) - && (concurrentSearchExecutor != null)) { + if ((clusterService != null) && (concurrentSearchExecutor != null)) { return indexService.getIndexSettings() .getSettings() .getAsBoolean( diff --git a/server/src/main/java/org/opensearch/search/SearchModule.java b/server/src/main/java/org/opensearch/search/SearchModule.java index 62d397de58187..88218896dceae 100644 --- a/server/src/main/java/org/opensearch/search/SearchModule.java +++ b/server/src/main/java/org/opensearch/search/SearchModule.java @@ -39,7 +39,6 @@ import org.opensearch.common.geo.ShapesAvailability; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.ParseFieldRegistry; import org.opensearch.core.ParseField; import 
org.opensearch.core.common.io.stream.NamedWriteableRegistry; @@ -1279,7 +1278,7 @@ private SearchPlugin.ExecutorServiceProvider registerIndexSearcherExecutorProvid } } - if (provider == null && FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { + if (provider == null) { provider = (ThreadPool threadPool) -> threadPool.executor(INDEX_SEARCHER); } return provider; diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 2c85fcbb25f35..7cd91557fcf6d 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -250,7 +250,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv public static final Setting<Boolean> CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING = Setting.boolSetting( "search.concurrent_segment_search.enabled", - true, + false, Property.Dynamic, Property.NodeScope ); diff --git a/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java b/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java index 631ace41090d7..19a59e9f7bebe 100644 --- a/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java +++ b/server/src/main/java/org/opensearch/search/query/QueryPhaseSearcherWrapper.java @@ -12,7 +12,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.Query; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.AggregationProcessor; import org.opensearch.search.internal.ContextIndexSearcher; import org.opensearch.search.internal.SearchContext; @@ -33,9 +32,7 @@ public class QueryPhaseSearcherWrapper implements QueryPhaseSearcher { public QueryPhaseSearcherWrapper() { this.defaultQueryPhaseSearcher = new QueryPhase.DefaultQueryPhaseSearcher(); - this.concurrentQueryPhaseSearcher = FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH) - ? 
new ConcurrentQueryPhaseSearcher() - : null; + this.concurrentQueryPhaseSearcher = new ConcurrentQueryPhaseSearcher(); } /** @@ -58,10 +55,8 @@ public boolean searchWith( boolean hasTimeout ) throws IOException { if (searchContext.shouldUseConcurrentSearch()) { - LOGGER.debug("Using concurrent search over segments (experimental) for request with context id {}", searchContext.id()); return concurrentQueryPhaseSearcher.searchWith(searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout); } else { - LOGGER.debug("Using non-concurrent search over segments for request with context id {}", searchContext.id()); return defaultQueryPhaseSearcher.searchWith(searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout); } } @@ -74,13 +69,8 @@ public boolean searchWith( @Override public AggregationProcessor aggregationProcessor(SearchContext searchContext) { if (searchContext.shouldUseConcurrentSearch()) { - LOGGER.debug( - "Using concurrent aggregation processor over segments (experimental) for request with context id {}", - searchContext.id() - ); return concurrentQueryPhaseSearcher.aggregationProcessor(searchContext); } else { - LOGGER.debug("Using non-concurrent aggregation processor over segments for request with context id {}", searchContext.id()); return defaultQueryPhaseSearcher.aggregationProcessor(searchContext); } } diff --git a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java index 12052598d3671..84e67d0a5b178 100644 --- a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java @@ -42,7 +42,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.SizeValue; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.concurrent.OpenSearchExecutors; import org.opensearch.common.util.concurrent.OpenSearchThreadPoolExecutor; import org.opensearch.common.util.concurrent.ThreadContext; @@ -187,9 +186,7 @@ public static ThreadPoolType fromType(String type) { map.put(Names.REMOTE_PURGE, ThreadPoolType.SCALING); map.put(Names.REMOTE_REFRESH_RETRY, ThreadPoolType.SCALING); map.put(Names.REMOTE_RECOVERY, ThreadPoolType.SCALING); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - map.put(Names.INDEX_SEARCHER, ThreadPoolType.RESIZABLE); - } + map.put(Names.INDEX_SEARCHER, ThreadPoolType.RESIZABLE); THREAD_POOL_TYPES = Collections.unmodifiableMap(map); } @@ -282,12 +279,10 @@ public ThreadPool( TimeValue.timeValueMinutes(5) ) ); - if (FeatureFlags.isEnabled(FeatureFlags.CONCURRENT_SEGMENT_SEARCH)) { - builders.put( - Names.INDEX_SEARCHER, - new ResizableExecutorBuilder(settings, Names.INDEX_SEARCHER, allocatedProcessors, 1000, runnableTaskListener) - ); - } + builders.put( + Names.INDEX_SEARCHER, + new ResizableExecutorBuilder(settings, Names.INDEX_SEARCHER, allocatedProcessors, 1000, runnableTaskListener) + ); for (final ExecutorBuilder<?> builder : customBuilders) { if (builders.containsKey(builder.name())) { diff --git a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java index 00935ae0940a1..e84b5213be39e 100644 --- a/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java +++ 
b/server/src/test/java/org/opensearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -64,7 +64,6 @@ import org.apache.lucene.store.Directory; import org.opensearch.action.admin.indices.alias.Alias; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -96,11 +95,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - protected static class TestFieldSetting { public final String name; public final boolean storedOffset; diff --git a/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java b/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java index 0becb6cde5e64..66c9801d16598 100644 --- a/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java +++ b/server/src/test/java/org/opensearch/common/settings/SettingsModuleTests.java @@ -286,24 +286,9 @@ public void testDynamicIndexSettingsRegistration() { } public void testConcurrentSegmentSearchClusterSettings() { - // Test that we throw an exception without the feature flag - Settings settings = Settings.builder().put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build(); - SettingsException ex = expectThrows(SettingsException.class, () -> new SettingsModule(settings)); - assertEquals( - "unknown setting [" - + SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey() - + "] please check that any required plugins are installed, or check the breaking " - + "changes documentation for removed settings", - ex.getMessage() - ); - - // Test that the settings updates correctly with the feature flag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); boolean settingValue = randomBoolean(); - Settings settingsWithFeatureFlag = Settings.builder() - .put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), settingValue) - .build(); - SettingsModule settingsModule = new SettingsModule(settingsWithFeatureFlag); + Settings settings = Settings.builder().put(SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), settingValue).build(); + SettingsModule settingsModule = new SettingsModule(settings); assertEquals(settingValue, SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.get(settingsModule.getSettings())); } @@ -311,24 +296,9 @@ public void testConcurrentSegmentSearchIndexSettings() { Settings.Builder target = Settings.builder().put(Settings.EMPTY); Settings.Builder update = Settings.builder(); - // Test that we throw an exception without the feature flag SettingsModule module = new SettingsModule(Settings.EMPTY); IndexScopedSettings indexScopedSettings = module.getIndexScopedSettings(); - expectThrows( - SettingsException.class, - () -> indexScopedSettings.updateDynamicSettings( - Settings.builder().put(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build(), - target, - update, - "node" - ) - ); - - // Test that the settings updates correctly with the feature flag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); - SettingsModule moduleWithFeatureFlag = new SettingsModule(Settings.EMPTY); - IndexScopedSettings indexScopedSettingsWithFeatureFlag = moduleWithFeatureFlag.getIndexScopedSettings(); - 
indexScopedSettingsWithFeatureFlag.updateDynamicSettings( + indexScopedSettings.updateDynamicSettings( Settings.builder().put(IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build(), target, update, @@ -337,23 +307,11 @@ public void testConcurrentSegmentSearchIndexSettings() { } public void testMaxSliceCountClusterSettingsForConcurrentSearch() { - // Test that we throw an exception without the feature flag - Settings settings = Settings.builder() - .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), 2) - .build(); - SettingsException ex = expectThrows(SettingsException.class, () -> new SettingsModule(settings)); - assertTrue( - ex.getMessage() - .contains("unknown setting [" + SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey() + "]") - ); - - // Test that the settings updates correctly with the feature flag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); int settingValue = randomIntBetween(0, 10); - Settings settingsWithFeatureFlag = Settings.builder() + Settings settings = Settings.builder() .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue) .build(); - SettingsModule settingsModule = new SettingsModule(settingsWithFeatureFlag); + SettingsModule settingsModule = new SettingsModule(settings); assertEquals( settingValue, (int) SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.get(settingsModule.getSettings()) @@ -361,10 +319,10 @@ public void testMaxSliceCountClusterSettingsForConcurrentSearch() { // Test that negative value is not allowed settingValue = -1; - final Settings settingsWithFeatureFlag_2 = Settings.builder() + final Settings settings_2 = Settings.builder() .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey(), settingValue) .build(); - IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settingsWithFeatureFlag_2)); + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> new SettingsModule(settings_2)); assertTrue(iae.getMessage().contains(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_SETTING.getKey())); } } diff --git a/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java b/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java index 73f83a5642bb4..fa13ec2036797 100644 --- a/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/cat/RestShardsActionTests.java @@ -125,7 +125,7 @@ public void testBuildTable() { assertThat(headers.get(6).value, equalTo("ip")); assertThat(headers.get(7).value, equalTo("id")); assertThat(headers.get(8).value, equalTo("node")); - assertThat(headers.get(74).value, equalTo("docs.deleted")); + assertThat(headers.get(78).value, equalTo("docs.deleted")); final List<List<Table.Cell>> rows = table.getRows(); assertThat(rows.size(), equalTo(numShards)); @@ -141,9 +141,9 @@ public void testBuildTable() { assertThat(row.get(4).value, equalTo(shardStats.getStats().getDocs().getCount())); assertThat(row.get(6).value, equalTo(localNode.getHostAddress())); assertThat(row.get(7).value, equalTo(localNode.getId())); - assertThat(row.get(72).value, equalTo(shardStats.getDataPath())); - assertThat(row.get(73).value, equalTo(shardStats.getStatePath())); - assertThat(row.get(74).value, equalTo(shardStats.getStats().getDocs().getDeleted())); + 
assertThat(row.get(76).value, equalTo(shardStats.getDataPath())); + assertThat(row.get(77).value, equalTo(shardStats.getStatePath())); + assertThat(row.get(78).value, equalTo(shardStats.getStats().getDocs().getDeleted())); } } } diff --git a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java index 347011af98c6d..3793249d569f0 100644 --- a/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java +++ b/server/src/test/java/org/opensearch/search/DefaultSearchContextTests.java @@ -54,7 +54,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.BigArrays; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.util.MockBigArrays; import org.opensearch.common.util.MockPageCacheRecycler; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; @@ -80,7 +79,6 @@ import org.opensearch.search.rescore.RescoreContext; import org.opensearch.search.slice.SliceBuilder; import org.opensearch.search.sort.SortAndFormats; -import org.opensearch.test.FeatureFlagSetter; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; @@ -554,8 +552,6 @@ protected Engine.Searcher acquireSearcherInternal(String source) { } public void testSearchPathEvaluationUsingSortField() throws Exception { - // enable the concurrent set FeatureFlag - FeatureFlagSetter.set(FeatureFlags.CONCURRENT_SEGMENT_SEARCH); ShardSearchRequest shardSearchRequest = mock(ShardSearchRequest.class); when(shardSearchRequest.searchType()).thenReturn(SearchType.DEFAULT); ShardId shardId = new ShardId("index", UUID.randomUUID().toString(), 1); diff --git a/server/src/test/java/org/opensearch/search/SearchModuleTests.java b/server/src/test/java/org/opensearch/search/SearchModuleTests.java index 317253be9825f..01b8d6d8cdd72 100644 --- a/server/src/test/java/org/opensearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/opensearch/search/SearchModuleTests.java @@ -113,7 +113,6 @@ import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.hasSize; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; public class SearchModuleTests extends OpenSearchTestCase { @@ -431,9 +430,7 @@ public void testDefaultQueryPhaseSearcher() { } public void testConcurrentQueryPhaseSearcher() { - Settings settings = Settings.builder().put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true).build(); - FeatureFlags.initializeFeatureFlags(settings); - SearchModule searchModule = new SearchModule(settings, Collections.emptyList()); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); TestSearchContext searchContext = new TestSearchContext(null); searchContext.setConcurrentSegmentSearchEnabled(true); QueryPhase queryPhase = searchModule.getQueryPhase(); @@ -443,8 +440,6 @@ public void testConcurrentQueryPhaseSearcher() { } public void testPluginQueryPhaseSearcher() { - Settings settings = Settings.builder().put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true).build(); - FeatureFlags.initializeFeatureFlags(settings); QueryPhaseSearcher queryPhaseSearcher = (searchContext, searcher, query, collectors, hasFilterCollector, hasTimeout) -> false; SearchPlugin plugin1 = new SearchPlugin() { @Override @@ -452,7 +447,7 @@ public 
Optional<QueryPhaseSearcher> getQueryPhaseSearcher() { return Optional.of(queryPhaseSearcher); } }; - SearchModule searchModule = new SearchModule(settings, Collections.singletonList(plugin1)); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.singletonList(plugin1)); QueryPhase queryPhase = searchModule.getQueryPhase(); TestSearchContext searchContext = new TestSearchContext(null); assertEquals(queryPhaseSearcher, queryPhase.getQueryPhaseSearcher()); @@ -480,18 +475,10 @@ public Optional<QueryPhaseSearcher> getQueryPhaseSearcher() { } public void testIndexSearcher() { - SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); ThreadPool threadPool = mock(ThreadPool.class); - assertNull(searchModule.getIndexSearcherExecutor(threadPool)); - verify(threadPool, times(0)).executor(ThreadPool.Names.INDEX_SEARCHER); - - // enable concurrent segment search feature flag - Settings settings = Settings.builder().put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, true).build(); - FeatureFlags.initializeFeatureFlags(settings); - searchModule = new SearchModule(settings, Collections.emptyList()); + SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); searchModule.getIndexSearcherExecutor(threadPool); verify(threadPool).executor(ThreadPool.Names.INDEX_SEARCHER); - FeatureFlags.initializeFeatureFlags(Settings.EMPTY); } public void testMultiplePluginRegisterIndexSearcherProvider() { diff --git a/server/src/test/java/org/opensearch/search/SearchServiceTests.java b/server/src/test/java/org/opensearch/search/SearchServiceTests.java index 7c84078af080e..d502bab5918a8 100644 --- a/server/src/test/java/org/opensearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/opensearch/search/SearchServiceTests.java @@ -57,7 +57,6 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.settings.SettingsException; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.Strings; import org.opensearch.core.common.io.stream.StreamInput; @@ -225,11 +224,6 @@ public void onQueryPhase(SearchContext context, long tookInNanos) { } } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected Settings nodeSettings() { return Settings.builder().put("search.default_search_timeout", "5s").build(); @@ -1189,7 +1183,7 @@ public void testCreateSearchContext() throws IOException { public void testConcurrentSegmentSearchSearchContext() throws IOException { Boolean[][] scenarios = { // cluster setting, index setting, concurrent search enabled? 
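// With the feature flag gone, these cases exercise pure settings precedence: the cluster
// default is now false, the cluster setting applies when no index setting is present, and
// an explicit index-level setting takes precedence over the cluster-level one.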
- { null, null, true }, + { null, null, false }, { null, false, false }, { null, true, true }, { true, null, true }, diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java index 9c3a825eb6d86..eef7e4c45849d 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -37,7 +37,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; import java.util.ArrayList; @@ -66,11 +65,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override protected int numberOfShards() { // we need at least 2 diff --git a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java index b058d96aa7327..94cb4c7955a21 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/server/src/test/java/org/opensearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -39,7 +39,6 @@ import org.opensearch.common.document.DocumentField; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; @@ -97,11 +96,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex(UNMAPPED_IDX_NAME); diff --git a/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java b/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java index 90f4f1ba2ceb2..5cb5cdd18f6cc 100644 --- a/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java +++ b/test/external-modules/delayed-aggs/src/internalClusterTest/java/org/opensearch/search/aggregations/DelayedShardAggregationIT.java @@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.plugins.Plugin; import org.opensearch.search.aggregations.bucket.filter.InternalFilter; import org.opensearch.search.aggregations.metrics.InternalMax; @@ -64,11 +63,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, 
"true").build(); - } - @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(DelayedShardAggregationPlugin.class); diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java index cdf4d0db6385f..0b44fe447d6f8 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/bucket/AbstractTermsTestCase.java @@ -36,7 +36,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.bucket.terms.Terms; import org.opensearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode; @@ -63,11 +62,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public String randomExecutionHint() { return randomBoolean() ? null : randomFrom(ExecutionMode.values()).toString(); } diff --git a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java index d2957276700c0..8c2cefa89c860 100644 --- a/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java +++ b/test/framework/src/main/java/org/opensearch/search/aggregations/metrics/AbstractNumericTestCase.java @@ -35,7 +35,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.ParameterizedStaticSettingsOpenSearchIntegTestCase; @@ -63,11 +62,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - @Override public void setupSuiteScopeCluster() throws Exception { createIndex("idx"); diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 5e79396de8d8d..72d68ffb9449d 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -1857,6 +1857,9 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(SearchService.LOW_LEVEL_CANCELLATION_SETTING.getKey(), randomBoolean()) .putList(DISCOVERY_SEED_HOSTS_SETTING.getKey()) // empty list disables a port scan for other nodes .putList(DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), "file") + // By default, for tests we will put the target slice count of 2. 
This will increase the probability of having multiple slices + // when tests are run with concurrent segment search enabled + .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2) .put(featureFlagSettings()); // Enable tracer only when Telemetry Setting is enabled @@ -1864,11 +1867,6 @@ protected Settings nodeSettings(int nodeOrdinal) { builder.put(TelemetrySettings.TRACER_FEATURE_ENABLED_SETTING.getKey(), true); builder.put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), true); } - if (FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING.get(featureFlagSettings)) { - // By default, for tests we will put the target slice count of 2. This will increase the probability of having multiple slices - // when tests are run with concurrent segment search enabled - builder.put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2); - } return builder.build(); } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java index efc29d1c254e6..45ea63e862df6 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchSingleNodeTestCase.java @@ -255,13 +255,11 @@ private Node newNode() { .put(FeatureFlags.TELEMETRY_SETTING.getKey(), true) .put(TelemetrySettings.TRACER_ENABLED_SETTING.getKey(), true) .put(TelemetrySettings.TRACER_FEATURE_ENABLED_SETTING.getKey(), true) - .put(nodeSettings()) // allow test cases to provide their own settings or override these - .put(featureFlagSettings); - if (FeatureFlags.CONCURRENT_SEGMENT_SEARCH_SETTING.get(featureFlagSettings)) { // By default, for tests we will put the target slice count of 2. This will increase the probability of having multiple slices // when tests are run with concurrent segment search enabled - settingsBuilder.put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2); - } + .put(SearchService.CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY, 2) + .put(nodeSettings()) // allow test cases to provide their own settings or override these + .put(featureFlagSettings); Collection<Class<? 
extends Plugin>> plugins = getPlugins(); if (plugins.contains(getTestTransportPlugin()) == false) { diff --git a/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java index 4b864d820d9ec..6dd14e06248a9 100644 --- a/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java +++ b/test/framework/src/test/java/org/opensearch/test/ParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -12,7 +12,6 @@ import org.opensearch.action.admin.cluster.state.ClusterStateResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import java.io.IOException; import java.util.Arrays; @@ -34,11 +33,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSettings() throws IOException { final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); assertThat( diff --git a/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java index 76e05d55a854b..f38c1ecd26429 100644 --- a/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java +++ b/test/framework/src/test/java/org/opensearch/test/ParameterizedStaticSettingsOpenSearchIntegTests.java @@ -13,7 +13,6 @@ import org.opensearch.action.admin.cluster.node.info.NodeInfo; import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import java.io.IOException; import java.util.Arrays; @@ -36,11 +35,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSettings() throws IOException { final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); for (final NodeInfo node : nodes.getNodes()) { diff --git a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java index e96eaec6e85a3..1f9a7cb87ae15 100644 --- a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java +++ b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -12,7 +12,6 @@ import org.opensearch.action.admin.cluster.state.ClusterStateResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import java.io.IOException; import java.util.Arrays; @@ -35,11 +34,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSettings() throws IOException { final ClusterStateResponse 
cluster = client().admin().cluster().prepareState().all().get(); assertThat( diff --git a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java index 093be04d556b0..36ca14e453158 100644 --- a/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java +++ b/test/framework/src/test/java/org/opensearch/test/SuiteScopedParameterizedStaticSettingsOpenSearchIntegTests.java @@ -13,7 +13,6 @@ import org.opensearch.action.admin.cluster.node.info.NodeInfo; import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import java.io.IOException; import java.util.Arrays; @@ -36,11 +35,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSettings() throws IOException { final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); for (final NodeInfo node : nodes.getNodes()) { diff --git a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java index bd271b93ba674..84caebdb4302f 100644 --- a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java +++ b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedDynamicSettingsOpenSearchIntegTests.java @@ -12,7 +12,6 @@ import org.opensearch.action.admin.cluster.state.ClusterStateResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import java.io.IOException; import java.util.Arrays; @@ -35,11 +34,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSettings() throws IOException { final ClusterStateResponse cluster = client().admin().cluster().prepareState().all().get(); assertThat( diff --git a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java index 014f96657ba03..6df8ad2c27210 100644 --- a/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java +++ b/test/framework/src/test/java/org/opensearch/test/TestScopedParameterizedStaticSettingsOpenSearchIntegTests.java @@ -13,7 +13,6 @@ import org.opensearch.action.admin.cluster.node.info.NodeInfo; import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse; import org.opensearch.common.settings.Settings; -import org.opensearch.common.util.FeatureFlags; import java.io.IOException; import java.util.Arrays; @@ -36,11 +35,6 @@ public static Collection<Object[]> parameters() { ); } - @Override - protected Settings featureFlagSettings() { - return 
Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); - } - public void testSettings() throws IOException { final NodesInfoResponse nodes = client().admin().cluster().prepareNodesInfo().get(); for (final NodeInfo node : nodes.getNodes()) { From c564ee35ec000c38fe9d525a4b6add90cfc969fc Mon Sep 17 00:00:00 2001 From: panguixin <panguixin@bytedance.com> Date: Sat, 3 Feb 2024 08:27:13 +0800 Subject: [PATCH 163/178] Cleanup file cache on deleting index/shard directory (#11443) * cleanup file cache on deleting index/shard directory Signed-off-by: panguixin <panguixin@bytedance.com> * add index store listener Signed-off-by: panguixin <panguixin@bytedance.com> --------- Signed-off-by: panguixin <panguixin@bytedance.com> --- .../snapshots/SearchableSnapshotIT.java | 74 +++++++++++++ .../org/opensearch/env/NodeEnvironment.java | 43 +++++++- .../remote/filecache/FileCacheCleaner.java | 100 ++++++++++-------- .../opensearch/indices/IndicesService.java | 5 - .../main/java/org/opensearch/node/Node.java | 8 +- .../opensearch/env/NodeEnvironmentTests.java | 56 ++++++++++ .../filecache/FileCacheCleanerTests.java | 18 +--- .../snapshots/SnapshotResiliencyTests.java | 3 - 8 files changed, 235 insertions(+), 72 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java index 21554a8e4fb15..c89fef20aafb1 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SearchableSnapshotIT.java @@ -30,14 +30,17 @@ import org.opensearch.cluster.routing.GroupShardsIterator; import org.opensearch.cluster.routing.ShardIterator; import org.opensearch.cluster.routing.ShardRouting; +import org.opensearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.opensearch.common.Priority; import org.opensearch.common.io.PathUtils; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.index.Index; +import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.index.shard.ShardPath; import org.opensearch.index.store.remote.file.CleanerDaemonThreadLeakFilter; import org.opensearch.index.store.remote.filecache.FileCacheStats; import org.opensearch.monitor.fs.FsInfo; @@ -859,4 +862,75 @@ private void assertCacheDirectoryReplicaAndIndexCount(int numCacheFolderCount, i // Verifies if all the shards (primary and replica) have been deleted assertEquals(numCacheFolderCount, searchNodeFileCachePaths.size()); } + + public void testRelocateSearchableSnapshotIndex() throws Exception { + final String snapshotName = "test-snap"; + final String repoName = "test-repo"; + final String indexName = "test-idx-1"; + final String restoredIndexName = indexName + "-copy"; + final Client client = client(); + + internalCluster().ensureAtLeastNumDataNodes(1); + createIndexWithDocsAndEnsureGreen(0, 100, indexName); + + createRepositoryWithSettings(null, repoName); + takeSnapshot(client, snapshotName, repoName, indexName); + deleteIndicesAndEnsureGreen(client, indexName); + + String searchNode1 = internalCluster().startSearchOnlyNodes(1).get(0); + internalCluster().validateClusterFormed(); + 
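+ // Restore while searchNode1 is the only search node so the shard is initially allocated
+ // there; searchNode2 joins afterwards, which lets the test assert that the shard and file
+ // cache paths exist only on the node currently hosting the shard, before and after relocation.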
restoreSnapshotAndEnsureGreen(client, snapshotName, repoName); + assertRemoteSnapshotIndexSettings(client, restoredIndexName); + + String searchNode2 = internalCluster().startSearchOnlyNodes(1).get(0); + internalCluster().validateClusterFormed(); + + final Index index = resolveIndex(restoredIndexName); + assertSearchableSnapshotIndexDirectoryExistence(searchNode1, index, true); + assertSearchableSnapshotIndexDirectoryExistence(searchNode2, index, false); + + // relocate the shard from node1 to node2 + client.admin() + .cluster() + .prepareReroute() + .add(new MoveAllocationCommand(restoredIndexName, 0, searchNode1, searchNode2)) + .execute() + .actionGet(); + ClusterHealthResponse clusterHealthResponse = client.admin() + .cluster() + .prepareHealth() + .setWaitForEvents(Priority.LANGUID) + .setWaitForNoRelocatingShards(true) + .setTimeout(new TimeValue(5, TimeUnit.MINUTES)) + .execute() + .actionGet(); + assertThat(clusterHealthResponse.isTimedOut(), equalTo(false)); + assertDocCount(restoredIndexName, 100L); + + assertSearchableSnapshotIndexDirectoryExistence(searchNode1, index, false); + assertSearchableSnapshotIndexDirectoryExistence(searchNode2, index, true); + deleteIndicesAndEnsureGreen(client, restoredIndexName); + assertSearchableSnapshotIndexDirectoryExistence(searchNode2, index, false); + } + + private void assertSearchableSnapshotIndexDirectoryExistence(String nodeName, Index index, boolean exists) throws Exception { + final Node node = internalCluster().getInstance(Node.class, nodeName); + final ShardId shardId = new ShardId(index, 0); + final ShardPath shardPath = ShardPath.loadFileCachePath(node.getNodeEnvironment(), shardId); + + assertBusy(() -> { + assertTrue( + "shard state path should " + (exists ? "exist" : "not exist"), + Files.exists(shardPath.getShardStatePath()) == exists + ); + assertTrue("shard cache path should " + (exists ? "exist" : "not exist"), Files.exists(shardPath.getDataPath()) == exists); + }, 30, TimeUnit.SECONDS); + + final Path indexDataPath = node.getNodeEnvironment().fileCacheNodePath().fileCachePath.resolve(index.getUUID()); + final Path indexPath = node.getNodeEnvironment().fileCacheNodePath().indicesPath.resolve(index.getUUID()); + assertBusy(() -> { + assertTrue("index path should " + (exists ? "exist" : "not exist"), Files.exists(indexDataPath) == exists); + assertTrue("index cache path should " + (exists ? "exist" : "not exist"), Files.exists(indexPath) == exists); + }, 30, TimeUnit.SECONDS); + } } diff --git a/server/src/main/java/org/opensearch/env/NodeEnvironment.java b/server/src/main/java/org/opensearch/env/NodeEnvironment.java index c7ba5eb040a1f..2748938d8b761 100644 --- a/server/src/main/java/org/opensearch/env/NodeEnvironment.java +++ b/server/src/main/java/org/opensearch/env/NodeEnvironment.java @@ -199,6 +199,8 @@ public String toString() { private final NodeMetadata nodeMetadata; + private final IndexStoreListener indexStoreListener; + /** * Maximum number of data nodes that should run in an environment. */ @@ -295,18 +297,23 @@ public void close() { } } + public NodeEnvironment(Settings settings, Environment environment) throws IOException { + this(settings, environment, IndexStoreListener.EMPTY); + } + /** * Setup the environment. 
* @param settings settings from opensearch.yml */ - public NodeEnvironment(Settings settings, Environment environment) throws IOException { - if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) { + public NodeEnvironment(Settings settings, Environment environment, IndexStoreListener indexStoreListener) throws IOException { + if (DiscoveryNode.nodeRequiresLocalStorage(settings) == false) { nodePaths = null; fileCacheNodePath = null; sharedDataPath = null; locks = null; nodeLockId = -1; nodeMetadata = new NodeMetadata(generateNodeId(settings), Version.CURRENT); + this.indexStoreListener = IndexStoreListener.EMPTY; return; } boolean success = false; @@ -385,6 +392,7 @@ public NodeEnvironment(Settings settings, Environment environment) throws IOExce } this.nodeMetadata = loadNodeMetadata(settings, logger, nodePaths); + this.indexStoreListener = indexStoreListener; success = true; } finally { if (success == false) { @@ -577,6 +585,9 @@ public static void acquireFSLockForPaths(IndexSettings indexSettings, Path... sh public void deleteShardDirectoryUnderLock(ShardLock lock, IndexSettings indexSettings) throws IOException { final ShardId shardId = lock.getShardId(); assert isShardLocked(shardId) : "shard " + shardId + " is not locked"; + + indexStoreListener.beforeShardPathDeleted(shardId, indexSettings, this); + final Path[] paths = availableShardPaths(shardId); logger.trace("acquiring locks for {}, paths: [{}]", shardId, paths); acquireFSLockForPaths(indexSettings, paths); @@ -653,6 +664,8 @@ public void deleteIndexDirectorySafe(Index index, long lockTimeoutMS, IndexSetti * @param indexSettings settings for the index being deleted */ public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettings) throws IOException { + indexStoreListener.beforeIndexPathDeleted(index, indexSettings, this); + final Path[] indexPaths = indexPaths(index); logger.trace("deleting index {} directory, paths({}): [{}]", index, indexPaths.length, indexPaths); IOUtils.rm(indexPaths); @@ -663,6 +676,18 @@ public void deleteIndexDirectoryUnderLock(Index index, IndexSettings indexSettin } } + private void deleteIndexFileCacheDirectory(Index index) { + final Path indexCachePath = fileCacheNodePath().fileCachePath.resolve(index.getUUID()); + logger.trace("deleting index {} file cache directory, path: [{}]", index, indexCachePath); + if (Files.exists(indexCachePath)) { + try { + IOUtils.rm(indexCachePath); + } catch (IOException e) { + logger.error(() -> new ParameterizedMessage("Failed to delete cache path for index {}", index), e); + } + } + } + /** * Tries to lock all local shards for the given index. If any of the shard locks can't be acquired * a {@link ShardLockObtainFailedException} is thrown and all previously acquired locks are released. 
@@ -1387,4 +1412,18 @@ private static void tryWriteTempFile(Path path) throws IOException { } } } + + /** + * A listener that is executed on per-index and per-shard store events, like deleting shard path + * + * @opensearch.internal + */ + public interface IndexStoreListener { + default void beforeShardPathDeleted(ShardId shardId, IndexSettings indexSettings, NodeEnvironment env) {} + + default void beforeIndexPathDeleted(Index index, IndexSettings indexSettings, NodeEnvironment env) {} + + IndexStoreListener EMPTY = new IndexStoreListener() { + }; + } } diff --git a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java index fb89e651e7616..0261ab24dfa7a 100644 --- a/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java +++ b/server/src/main/java/org/opensearch/index/store/remote/filecache/FileCacheCleaner.java @@ -11,16 +11,13 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.opensearch.common.settings.Settings; +import org.opensearch.common.inject.Provider; import org.opensearch.common.util.io.IOUtils; import org.opensearch.core.index.Index; import org.opensearch.core.index.shard.ShardId; import org.opensearch.env.NodeEnvironment; -import org.opensearch.index.IndexModule; import org.opensearch.index.IndexSettings; -import org.opensearch.index.shard.IndexEventListener; import org.opensearch.index.shard.ShardPath; -import org.opensearch.indices.cluster.IndicesClusterStateService; import java.io.IOException; import java.nio.file.DirectoryStream; @@ -30,79 +27,90 @@ import static org.opensearch.index.store.remote.directory.RemoteSnapshotDirectoryFactory.LOCAL_STORE_LOCATION; /** - * IndexEventListener to clean up file cache when the index is deleted. The cached entries will be eligible + * IndexStoreListener to clean up file cache when the index is deleted. The cached entries will be eligible * for eviction when the shard is deleted, but this listener deterministically removes entries from memory and * from disk at the time of shard deletion as opposed to waiting for the cache to need to perform eviction. * * @opensearch.internal */ -public class FileCacheCleaner implements IndexEventListener { - private static final Logger log = LogManager.getLogger(FileCacheCleaner.class); +public class FileCacheCleaner implements NodeEnvironment.IndexStoreListener { + private static final Logger logger = LogManager.getLogger(FileCacheCleaner.class); - private final NodeEnvironment nodeEnvironment; - private final FileCache fileCache; + private final Provider<FileCache> fileCacheProvider; - public FileCacheCleaner(NodeEnvironment nodeEnvironment, FileCache fileCache) { - this.nodeEnvironment = nodeEnvironment; - this.fileCache = fileCache; + public FileCacheCleaner(Provider<FileCache> fileCacheProvider) { + this.fileCacheProvider = fileCacheProvider; } /** - * before shard deleted and after shard closed, cleans up the corresponding index file path entries from FC. - * @param shardId The shard id - * @param settings the shards index settings + * before shard path deleted, cleans up the corresponding index file path entries from FC and delete the corresponding shard file + * cache path. 
+     *
+     * @param shardId the shard id
+     * @param indexSettings the index settings
+     * @param nodeEnvironment the node environment
      */
     @Override
-    public void beforeIndexShardDeleted(ShardId shardId, Settings settings) {
+    public void beforeShardPathDeleted(ShardId shardId, IndexSettings indexSettings, NodeEnvironment nodeEnvironment) {
+        if (indexSettings.isRemoteSnapshot()) {
+            final ShardPath shardPath = ShardPath.loadFileCachePath(nodeEnvironment, shardId);
+            cleanupShardFileCache(shardPath);
+            deleteShardFileCacheDirectory(shardPath);
+        }
+    }
+
+    /**
+     * Cleans up the corresponding index file path entries from FileCache
+     *
+     * @param shardPath the shard path
+     */
+    private void cleanupShardFileCache(ShardPath shardPath) {
         try {
-            if (isRemoteSnapshot(settings)) {
-                final ShardPath shardPath = ShardPath.loadFileCachePath(nodeEnvironment, shardId);
-                final Path localStorePath = shardPath.getDataPath().resolve(LOCAL_STORE_LOCATION);
-                try (DirectoryStream<Path> ds = Files.newDirectoryStream(localStorePath)) {
-                    for (Path subPath : ds) {
-                        fileCache.remove(subPath.toRealPath());
-                    }
+            final FileCache fc = fileCacheProvider.get();
+            assert fc != null;
+            final Path localStorePath = shardPath.getDataPath().resolve(LOCAL_STORE_LOCATION);
+            try (DirectoryStream<Path> ds = Files.newDirectoryStream(localStorePath)) {
+                for (Path subPath : ds) {
+                    fc.remove(subPath.toRealPath());
                 }
             }
         } catch (IOException ioe) {
-            log.error(() -> new ParameterizedMessage("Error removing items from cache during shard deletion {}", shardId), ioe);
+            logger.error(
+                () -> new ParameterizedMessage("Error removing items from cache during shard deletion {}", shardPath.getShardId()),
+                ioe
+            );
        }
    }
 
-    @Override
-    public void afterIndexShardDeleted(ShardId shardId, Settings settings) {
-        if (isRemoteSnapshot(settings)) {
-            final Path path = ShardPath.loadFileCachePath(nodeEnvironment, shardId).getDataPath();
-            try {
-                if (Files.exists(path)) {
-                    IOUtils.rm(path);
-                }
-            } catch (IOException e) {
-                log.error(() -> new ParameterizedMessage("Failed to delete cache path for shard {}", shardId), e);
+    private void deleteShardFileCacheDirectory(ShardPath shardPath) {
+        final Path path = shardPath.getDataPath();
+        try {
+            if (Files.exists(path)) {
+                IOUtils.rm(path);
            }
+        } catch (IOException e) {
+            logger.error(() -> new ParameterizedMessage("Failed to delete cache path for shard {}", shardPath.getShardId()), e);
        }
    }
 
+    /**
+     * Before the index path is deleted, deletes the corresponding index file cache path.
+ * + * @param index the index + * @param indexSettings the index settings + * @param nodeEnvironment the node environment + */ @Override - public void afterIndexRemoved( - Index index, - IndexSettings indexSettings, - IndicesClusterStateService.AllocatedIndices.IndexRemovalReason reason - ) { - if (isRemoteSnapshot(indexSettings.getSettings()) - && reason == IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.DELETED) { + public void beforeIndexPathDeleted(Index index, IndexSettings indexSettings, NodeEnvironment nodeEnvironment) { + if (indexSettings.isRemoteSnapshot()) { final Path indexCachePath = nodeEnvironment.fileCacheNodePath().fileCachePath.resolve(index.getUUID()); if (Files.exists(indexCachePath)) { try { IOUtils.rm(indexCachePath); } catch (IOException e) { - log.error(() -> new ParameterizedMessage("Failed to delete cache path for index {}", index), e); + logger.error(() -> new ParameterizedMessage("Failed to delete cache path for index {}", index), e); } } } } - - private static boolean isRemoteSnapshot(Settings settings) { - return IndexModule.Type.REMOTE_SNAPSHOT.match(settings.get(IndexModule.INDEX_STORE_TYPE_SETTING.getKey())); - } } diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index db5b93f073b03..c83f2a4c5cd5d 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -135,7 +135,6 @@ import org.opensearch.index.shard.IndexingOperationListener; import org.opensearch.index.shard.IndexingStats; import org.opensearch.index.shard.IndexingStats.Stats.DocStatusStats; -import org.opensearch.index.store.remote.filecache.FileCacheCleaner; import org.opensearch.index.translog.InternalTranslogFactory; import org.opensearch.index.translog.RemoteBlobStoreInternalTranslogFactory; import org.opensearch.index.translog.TranslogFactory; @@ -362,7 +361,6 @@ public class IndicesService extends AbstractLifecycleComponent private final BiFunction<IndexSettings, ShardRouting, TranslogFactory> translogFactorySupplier; private volatile TimeValue clusterDefaultRefreshInterval; private volatile TimeValue clusterRemoteTranslogBufferInterval; - private final FileCacheCleaner fileCacheCleaner; private final SearchRequestStats searchRequestStats; @@ -395,7 +393,6 @@ public IndicesService( Map<String, IndexStorePlugin.RecoveryStateFactory> recoveryStateFactories, IndexStorePlugin.DirectoryFactory remoteDirectoryFactory, Supplier<RepositoriesService> repositoriesServiceSupplier, - FileCacheCleaner fileCacheCleaner, SearchRequestStats searchRequestStats, @Nullable RemoteStoreStatsTrackerFactory remoteStoreStatsTrackerFactory, RecoverySettings recoverySettings @@ -450,7 +447,6 @@ public void onRemoval(ShardId shardId, String fieldName, boolean wasEvicted, lon this.directoryFactories = directoryFactories; this.recoveryStateFactories = recoveryStateFactories; - this.fileCacheCleaner = fileCacheCleaner; // doClose() is called when shutting down a node, yet there might still be ongoing requests // that we need to wait for before closing some resources such as the caches. 
In order to // avoid closing these resources while ongoing requests are still being processed, we use a @@ -766,7 +762,6 @@ public void onStoreClosed(ShardId shardId) { }; finalListeners.add(onStoreClose); finalListeners.add(oldShardsStats); - finalListeners.add(fileCacheCleaner); final IndexService indexService = createIndexService( CREATE_INDEX, indexMetadata, diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 8510122c39fcb..547f610f4a752 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -527,7 +527,11 @@ protected Node( */ this.environment = new Environment(settings, initialEnvironment.configDir(), Node.NODE_LOCAL_STORAGE_SETTING.get(settings)); Environment.assertEquivalent(initialEnvironment, this.environment); - nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + if (DiscoveryNode.isSearchNode(settings) == false) { + nodeEnvironment = new NodeEnvironment(tmpSettings, environment); + } else { + nodeEnvironment = new NodeEnvironment(settings, environment, new FileCacheCleaner(this::fileCache)); + } logger.info( "node name [{}], node ID [{}], cluster name [{}], roles {}", NODE_NAME_SETTING.get(tmpSettings), @@ -678,7 +682,6 @@ protected Node( ); // File cache will be initialized by the node once circuit breakers are in place. initializeFileCache(settings, circuitBreakerService.getBreaker(CircuitBreaker.REQUEST)); - final FileCacheCleaner fileCacheCleaner = new FileCacheCleaner(nodeEnvironment, fileCache); final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool, fileCache); pluginsService.filterPlugins(CircuitBreakerPlugin.class).forEach(plugin -> { @@ -812,7 +815,6 @@ protected Node( recoveryStateFactories, remoteDirectoryFactory, repositoriesServiceReference::get, - fileCacheCleaner, searchRequestStats, remoteStoreStatsTrackerFactory, recoverySettings diff --git a/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java b/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java index 7f669934579ee..962eb743dca6e 100644 --- a/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java +++ b/server/src/test/java/org/opensearch/env/NodeEnvironmentTests.java @@ -359,6 +359,57 @@ protected void doRun() throws Exception { env.close(); } + public void testIndexStoreListener() throws Exception { + final AtomicInteger shardCounter = new AtomicInteger(0); + final AtomicInteger indexCounter = new AtomicInteger(0); + final Index index = new Index("foo", "fooUUID"); + final ShardId shardId = new ShardId(index, 0); + final NodeEnvironment.IndexStoreListener listener = new NodeEnvironment.IndexStoreListener() { + @Override + public void beforeShardPathDeleted(ShardId inShardId, IndexSettings indexSettings, NodeEnvironment env) { + assertEquals(shardId, inShardId); + shardCounter.incrementAndGet(); + } + + @Override + public void beforeIndexPathDeleted(Index inIndex, IndexSettings indexSettings, NodeEnvironment env) { + assertEquals(index, inIndex); + indexCounter.incrementAndGet(); + } + }; + final NodeEnvironment env = newNodeEnvironment(listener); + + for (Path path : env.indexPaths(index)) { + Files.createDirectories(path.resolve("0")); + } + + for (Path path : env.indexPaths(index)) { + assertTrue(Files.exists(path.resolve("0"))); + } + assertEquals(0, shardCounter.get()); + + env.deleteShardDirectorySafe(new ShardId(index, 0), idxSettings); + + for (Path path : 
env.indexPaths(index)) { + assertFalse(Files.exists(path.resolve("0"))); + } + assertEquals(1, shardCounter.get()); + + for (Path path : env.indexPaths(index)) { + assertTrue(Files.exists(path)); + } + assertEquals(0, indexCounter.get()); + + env.deleteIndexDirectorySafe(index, 5000, idxSettings); + + for (Path path : env.indexPaths(index)) { + assertFalse(Files.exists(path)); + } + assertEquals(1, indexCounter.get()); + assertTrue("LockedShards: " + env.lockedShards(), env.lockedShards().isEmpty()); + env.close(); + } + public void testStressShardLock() throws IOException, InterruptedException { class Int { int value = 0; @@ -629,6 +680,11 @@ public NodeEnvironment newNodeEnvironment() throws IOException { return newNodeEnvironment(Settings.EMPTY); } + public NodeEnvironment newNodeEnvironment(NodeEnvironment.IndexStoreListener listener) throws IOException { + Settings build = buildEnvSettings(Settings.EMPTY); + return new NodeEnvironment(build, TestEnvironment.newEnvironment(build), listener); + } + @Override public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException { Settings build = buildEnvSettings(settings); diff --git a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java index 04434fa52e555..e2a6a4011a6b7 100644 --- a/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java +++ b/server/src/test/java/org/opensearch/index/store/remote/filecache/FileCacheCleanerTests.java @@ -18,7 +18,6 @@ import org.opensearch.env.NodeEnvironment; import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.ShardPath; -import org.opensearch.indices.cluster.IndicesClusterStateService; import org.opensearch.test.OpenSearchTestCase; import org.hamcrest.MatcherAssert; import org.junit.After; @@ -59,7 +58,7 @@ public class FileCacheCleanerTests extends OpenSearchTestCase { @Before public void setUpFileCache() throws IOException { env = newNodeEnvironment(SETTINGS); - cleaner = new FileCacheCleaner(env, fileCache); + cleaner = new FileCacheCleaner(() -> fileCache); files.put(SHARD_0, addFile(fileCache, env, SHARD_0)); files.put(SHARD_1, addFile(fileCache, env, SHARD_1)); MatcherAssert.assertThat(fileCache.size(), equalTo(2L)); @@ -103,12 +102,11 @@ public void testShardRemoved() { final Path cachePath = ShardPath.loadFileCachePath(env, SHARD_0).getDataPath(); assertTrue(Files.exists(cachePath)); - cleaner.beforeIndexShardDeleted(SHARD_0, SETTINGS); + cleaner.beforeShardPathDeleted(SHARD_0, INDEX_SETTINGS, env); MatcherAssert.assertThat(fileCache.size(), equalTo(1L)); assertNull(fileCache.get(files.get(SHARD_0))); assertFalse(Files.exists(files.get(SHARD_0))); assertTrue(Files.exists(files.get(SHARD_1))); - cleaner.afterIndexShardDeleted(SHARD_0, SETTINGS); assertFalse(Files.exists(cachePath)); } @@ -116,15 +114,9 @@ public void testIndexRemoved() { final Path indexCachePath = env.fileCacheNodePath().fileCachePath.resolve(SHARD_0.getIndex().getUUID()); assertTrue(Files.exists(indexCachePath)); - cleaner.beforeIndexShardDeleted(SHARD_0, SETTINGS); - cleaner.afterIndexShardDeleted(SHARD_0, SETTINGS); - cleaner.beforeIndexShardDeleted(SHARD_1, SETTINGS); - cleaner.afterIndexShardDeleted(SHARD_1, SETTINGS); - cleaner.afterIndexRemoved( - SHARD_0.getIndex(), - INDEX_SETTINGS, - IndicesClusterStateService.AllocatedIndices.IndexRemovalReason.DELETED - ); + cleaner.beforeShardPathDeleted(SHARD_0, INDEX_SETTINGS, 
env); + cleaner.beforeShardPathDeleted(SHARD_1, INDEX_SETTINGS, env); + cleaner.beforeIndexPathDeleted(SHARD_0.getIndex(), INDEX_SETTINGS, env); MatcherAssert.assertThat(fileCache.size(), equalTo(0L)); assertFalse(Files.exists(indexCachePath)); } diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 9bb1f51c51cf6..7c50e961853b5 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -188,7 +188,6 @@ import org.opensearch.index.shard.PrimaryReplicaSyncer; import org.opensearch.index.store.RemoteSegmentStoreDirectoryFactory; import org.opensearch.index.store.remote.filecache.FileCache; -import org.opensearch.index.store.remote.filecache.FileCacheCleaner; import org.opensearch.index.store.remote.filecache.FileCacheStats; import org.opensearch.indices.IndicesModule; import org.opensearch.indices.IndicesService; @@ -2037,7 +2036,6 @@ public void onFailure(final Exception e) { final MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); final SetOnce<RepositoriesService> repositoriesServiceReference = new SetOnce<>(); repositoriesServiceReference.set(repositoriesService); - FileCacheCleaner fileCacheCleaner = new FileCacheCleaner(nodeEnv, null); indicesService = new IndicesService( settings, mock(PluginsService.class), @@ -2072,7 +2070,6 @@ public void onFailure(final Exception e) { emptyMap(), new RemoteSegmentStoreDirectoryFactory(() -> repositoriesService, threadPool), repositoriesServiceReference::get, - fileCacheCleaner, null, new RemoteStoreStatsTrackerFactory(clusterService, settings), DefaultRecoverySettings.INSTANCE From a8dd6a0d6c272dd9c92fdac00f11715a1a185d25 Mon Sep 17 00:00:00 2001 From: Jay Deng <jayd0104@gmail.com> Date: Fri, 2 Feb 2024 18:46:33 -0800 Subject: [PATCH 164/178] Fix bwc for concurrent segment search GA (#12156) Signed-off-by: Jay Deng <jayd0104@gmail.com> --- .../main/resources/rest-api-spec/test/cat.shards/10_basic.yml | 4 ++-- .../resources/rest-api-spec/test/cat.thread_pool/10_basic.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml index 91521029e2d31..29fbf55417961 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.shards/10_basic.yml @@ -1,13 +1,13 @@ "Help": - skip: - version: " - 2.99.99" + version: " - 2.11.99" reason: deleted docs and concurrent search are added in 2.12.0 features: node_selector - do: cat.shards: help: true node_selector: - version: "3.0.0 - " + version: "2.12.0 - " - match: $body: | diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml index 1b8ffbf762b8f..39c8040993f2a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml @@ -27,7 +27,7 @@ --- "Test cat thread_pool total_wait_time output with concurrent search thread_pool": - skip: - version: " - 2.99.99" + version: " - 2.11.99" reason: index_search thread_pool was introduced in V_2.12.0 - do: From 
4d055b85ab48dcb52ec48477fad44ccf06ce2194 Mon Sep 17 00:00:00 2001
From: Gaurav Bafna <85113518+gbbafna@users.noreply.github.com>
Date: Mon, 5 Feb 2024 12:18:28 +0530
Subject: [PATCH 165/178] [Remote Store] Waiting for remote store upload in snapshot/local recovery (#11720)

* Giving time for snapshot/local recovery to upload all the data to the remote store

Signed-off-by: Gaurav Bafna <gbbafna@amazon.com>
---
 .../indices/create/RemoteCloneIndexIT.java    |  59 +++++++
 .../RemoteStoreBaseIntegTestCase.java         |  21 ++-
 .../remotestore/RemoteStoreRestoreIT.java     |  19 ++-
 .../snapshots/SystemRepositoryIT.java         |  11 +-
 .../common/settings/ClusterSettings.java      |   1 +
 .../opensearch/index/shard/IndexShard.java    |  28 ++-
 .../shard/RemoteStoreRefreshListener.java     |  22 ++-
 .../opensearch/index/shard/StoreRecovery.java |  38 ++++-
 .../indices/recovery/RecoverySettings.java    |  21 +++
 .../repositories/RepositoriesModule.java      |   6 +
 .../fs/ReloadableFsRepository.java            | 161 +++++++++++++++++-
 .../RemoteStoreRefreshListenerTests.java      |  42 +++++
 12 files changed, 404 insertions(+), 25 deletions(-)

diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java
index a081110e6c5a1..f50e8fd0a38cf 100644
--- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java
@@ -50,6 +50,8 @@
 import org.opensearch.remotestore.RemoteStoreBaseIntegTestCase;
 import org.opensearch.test.VersionUtils;
 
+import java.util.concurrent.ExecutionException;
+
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.equalTo;
@@ -130,4 +132,61 @@ public void testCreateCloneIndex() {
 
     }
 
+    public void testCreateCloneIndexFailure() throws ExecutionException, InterruptedException {
+        Version version = VersionUtils.randomIndexCompatibleVersion(random());
+        int numPrimaryShards = 1;
+        prepareCreate("source").setSettings(
+            Settings.builder().put(indexSettings()).put("number_of_shards", numPrimaryShards).put("index.version.created", version)
+        ).get();
+        final int docs = 2;
+        for (int i = 0; i < docs; i++) {
+            client().prepareIndex("source").setSource("{\"foo\" : \"bar\", \"i\" : " + i + "}", MediaTypeRegistry.JSON).get();
+        }
+        internalCluster().ensureAtLeastNumDataNodes(2);
+        // ensure all shards are allocated otherwise the ensure green below might not succeed since we require the merge node
+        // if we change the setting too quickly we will end up with one replica unassigned which can't be assigned anymore due
+        // to the require._name below.
+        ensureGreen();
+        // relocate all shards to one node such that we can merge it.
+        client().admin().indices().prepareUpdateSettings("source").setSettings(Settings.builder().put("index.blocks.write", true)).get();
+        ensureGreen();
+
+        // disable rebalancing to be able to capture the right stats. balancing can move the target primary
+        // making it hard to pinpoint the source shards.
+ client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none")) + .get(); + try { + setFailRate(REPOSITORY_NAME, 100); + + client().admin() + .indices() + .prepareResizeIndex("source", "target") + .setResizeType(ResizeType.CLONE) + .setWaitForActiveShards(0) + .setSettings(Settings.builder().put("index.number_of_replicas", 0).putNull("index.blocks.write").build()) + .get(); + + Thread.sleep(2000); + ensureYellow("target"); + + } catch (ExecutionException | InterruptedException e) { + throw new RuntimeException(e); + } finally { + setFailRate(REPOSITORY_NAME, 0); + ensureGreen(); + // clean up + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings( + Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), (String) null) + ) + .get(); + } + + } + } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java index 751de66a97806..e43ff9a412784 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java @@ -9,6 +9,8 @@ package org.opensearch.remotestore; import org.opensearch.action.admin.cluster.remotestore.restore.RestoreRemoteStoreRequest; +import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; +import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; import org.opensearch.action.admin.indices.get.GetIndexRequest; import org.opensearch.action.admin.indices.get.GetIndexResponse; import org.opensearch.action.bulk.BulkItemResponse; @@ -37,7 +39,7 @@ import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.blobstore.BlobStoreRepository; -import org.opensearch.repositories.fs.FsRepository; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.test.OpenSearchIntegTestCase; import org.junit.After; @@ -60,6 +62,7 @@ import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; +import static org.opensearch.repositories.fs.ReloadableFsRepository.REPOSITORIES_FAILRATE_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; public class RemoteStoreBaseIntegTestCase extends OpenSearchIntegTestCase { @@ -146,6 +149,18 @@ protected Settings nodeSettings(int nodeOrdinal) { } } + protected void setFailRate(String repoName, int value) throws ExecutionException, InterruptedException { + GetRepositoriesRequest gr = new GetRepositoriesRequest(new String[] { repoName }); + GetRepositoriesResponse res = client().admin().cluster().getRepositories(gr).get(); + RepositoryMetadata rmd = res.repositories().get(0); + Settings.Builder settings = Settings.builder() + .put("location", rmd.settings().get("location")) + .put(REPOSITORIES_FAILRATE_SETTING.getKey(), value); + assertAcked( + 
client().admin().cluster().preparePutRepository(repoName).setType(ReloadableFsRepository.TYPE).setSettings(settings).get() + ); + } + public Settings indexSettings() { return defaultIndexSettings(); } @@ -224,10 +239,10 @@ public static Settings buildRemoteStoreNodeAttributes( return buildRemoteStoreNodeAttributes( segmentRepoName, segmentRepoPath, - FsRepository.TYPE, + ReloadableFsRepository.TYPE, translogRepoName, translogRepoPath, - FsRepository.TYPE, + ReloadableFsRepository.TYPE, withRateLimiterAttributes ); } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java index dfa5528eafcf2..94acf2b1dbb27 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreRestoreIT.java @@ -24,6 +24,7 @@ import org.opensearch.indices.IndicesService; import org.opensearch.repositories.RepositoriesService; import org.opensearch.repositories.Repository; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.test.InternalTestCluster; import org.opensearch.test.OpenSearchIntegTestCase; @@ -479,7 +480,14 @@ public void testRateLimitedRemoteDownloads() throws Exception { settingsMap.entrySet().forEach(entry -> settings.put(entry.getKey(), entry.getValue())); settings.put("location", segmentRepoPath).put("max_remote_download_bytes_per_sec", 4, ByteSizeUnit.KB); - assertAcked(client().admin().cluster().preparePutRepository(REPOSITORY_NAME).setType("fs").setSettings(settings).get()); + assertAcked( + client().admin() + .cluster() + .preparePutRepository(REPOSITORY_NAME) + .setType(ReloadableFsRepository.TYPE) + .setSettings(settings) + .get() + ); for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { Repository segmentRepo = repositoriesService.repository(REPOSITORY_NAME); @@ -508,7 +516,14 @@ public void testRateLimitedRemoteDownloads() throws Exception { // revert repo metadata to pass asserts on repo metadata vs. 
node attrs during teardown // https://github.com/opensearch-project/OpenSearch/pull/9569#discussion_r1345668700 settings.remove("max_remote_download_bytes_per_sec"); - assertAcked(client().admin().cluster().preparePutRepository(REPOSITORY_NAME).setType("fs").setSettings(settings).get()); + assertAcked( + client().admin() + .cluster() + .preparePutRepository(REPOSITORY_NAME) + .setType(ReloadableFsRepository.TYPE) + .setSettings(settings) + .get() + ); for (RepositoriesService repositoriesService : internalCluster().getDataNodeInstances(RepositoriesService.class)) { Repository segmentRepo = repositoriesService.repository(REPOSITORY_NAME); assertNull(segmentRepo.getMetadata().settings().get("max_remote_download_bytes_per_sec")); diff --git a/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java b/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java index f50fc691fb232..28b84655a2cc7 100644 --- a/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/snapshots/SystemRepositoryIT.java @@ -12,7 +12,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.repositories.RepositoryException; -import org.opensearch.repositories.fs.FsRepository; +import org.opensearch.repositories.fs.ReloadableFsRepository; import org.opensearch.test.OpenSearchIntegTestCase; import org.junit.Before; @@ -53,7 +53,7 @@ public void testRestrictedSettingsCantBeUpdated() { assertEquals( e.getMessage(), "[system-repo-name] trying to modify an unmodifiable attribute type of system " - + "repository from current value [fs] to new value [mock]" + + "repository from current value [reloadable-fs] to new value [mock]" ); } @@ -65,7 +65,12 @@ public void testSystemRepositoryNonRestrictedSettingsCanBeUpdated() { final Settings.Builder repoSettings = Settings.builder().put("location", absolutePath).put("chunk_size", new ByteSizeValue(20)); assertAcked( - client.admin().cluster().preparePutRepository(systemRepoName).setType(FsRepository.TYPE).setSettings(repoSettings).get() + client.admin() + .cluster() + .preparePutRepository(systemRepoName) + .setType(ReloadableFsRepository.TYPE) + .setSettings(repoSettings) + .get() ); } } diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 14b915935062c..0c97d62c44a5e 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -293,6 +293,7 @@ public void apply(Settings value, Settings current, Settings previous) { RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_FILE_CHUNKS_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_OPERATIONS_SETTING, RecoverySettings.INDICES_RECOVERY_MAX_CONCURRENT_REMOTE_STORE_STREAMS_SETTING, + RecoverySettings.INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT, RecoverySettings.CLUSTER_REMOTE_INDEX_SEGMENT_METADATA_RETENTION_MAX_COUNT_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_PRIMARIES_RECOVERIES_SETTING, ThrottlingAllocationDecider.CLUSTER_ROUTING_ALLOCATION_NODE_INITIAL_REPLICAS_RECOVERIES_SETTING, diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 5834eabfa9af0..977155a1cbb72 100644 --- 
a/server/src/main/java/org/opensearch/index/shard/IndexShard.java
+++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java
@@ -2025,23 +2025,35 @@ public RemoteSegmentStoreDirectory getRemoteDirectory() {
     }
 
     /**
-     Returns true iff it is able to verify that remote segment store
-     is in sync with local
+     * Returns true iff it is able to verify that remote segment store
+     * is in sync with local
      */
     boolean isRemoteSegmentStoreInSync() {
         assert indexSettings.isRemoteStoreEnabled();
         try {
             RemoteSegmentStoreDirectory directory = getRemoteDirectory();
             if (directory.readLatestMetadataFile() != null) {
-                // verifying that all files except EXCLUDE_FILES are uploaded to the remote
                 Collection<String> uploadFiles = directory.getSegmentsUploadedToRemoteStore().keySet();
-                SegmentInfos segmentInfos = store.readLastCommittedSegmentsInfo();
-                Collection<String> localFiles = segmentInfos.files(true);
-                if (uploadFiles.containsAll(localFiles)) {
-                    return true;
+                try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = getSegmentInfosSnapshot()) {
+                    Collection<String> localSegmentInfosFiles = segmentInfosGatedCloseable.get().files(true);
+                    Set<String> localFiles = new HashSet<>(localSegmentInfosFiles);
+                    // verifying that all files except EXCLUDE_FILES are uploaded to the remote
+                    localFiles.removeAll(RemoteStoreRefreshListener.EXCLUDE_FILES);
+                    if (uploadFiles.containsAll(localFiles)) {
+                        return true;
+                    }
+                    logger.debug(
+                        () -> new ParameterizedMessage(
+                            "RemoteSegmentStoreSyncStatus localSize={} remoteSize={}",
+                            localFiles.size(),
+                            uploadFiles.size()
+                        )
+                    );
                 }
             }
-        } catch (IOException e) {
+        } catch (AlreadyClosedException e) {
+            throw e;
+        } catch (Throwable e) {
             logger.error("Exception while reading latest metadata", e);
         }
         return false;
diff --git a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java
index f75e86540ed9b..7bb80b736693f 100644
--- a/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java
+++ b/server/src/main/java/org/opensearch/index/shard/RemoteStoreRefreshListener.java
@@ -172,13 +172,33 @@ private boolean shouldSync(boolean didRefresh, boolean skipPrimaryTermCheck) {
             // When the shouldSync is called the first time, then 1st condition on primary term is true. But after that
             // we update the primary term and the same condition would not evaluate to true again in syncSegments.
             // Below check ensures that if there is commit, then that gets picked up by both 1st and 2nd shouldSync call.
-            || isRefreshAfterCommitSafe();
+            || isRefreshAfterCommitSafe()
+            || isRemoteSegmentStoreInSync() == false;
         if (shouldSync || skipPrimaryTermCheck) {
             return shouldSync;
         }
         return this.primaryTerm != indexShard.getOperationPrimaryTerm();
     }
 
+    /**
+     * Checks if all files present in local store are uploaded to remote store or part of excluded files.
+     *
+     * Different from IndexShard#isRemoteSegmentStoreInSync as
+     * it uses the files-uploaded cache in RemoteDirectory and it doesn't make a remote store call.
+     * Doesn't throw an exception on the store getting closed, as the store will be open.
+     *
+     * @return true iff all the local files are uploaded to remote store.
+     */
+    boolean isRemoteSegmentStoreInSync() {
+        try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = indexShard.getSegmentInfosSnapshot()) {
+            return segmentInfosGatedCloseable.get().files(true).stream().allMatch(this::skipUpload);
+        } catch (Throwable throwable) {
+            logger.error("Throwable thrown during isRemoteSegmentStoreInSync", throwable);
+        }
+        return false;
+    }
+
     /*
     @return false if retry is needed
     */
diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java
index 5b1940bb1d9a5..3faef2da05320 100644
--- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java
+++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java
@@ -38,11 +38,13 @@
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.SegmentInfos;
 import org.apache.lucene.search.Sort;
+import org.apache.lucene.store.AlreadyClosedException;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FilterDirectory;
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.opensearch.ExceptionsHelper;
+import org.opensearch.OpenSearchException;
 import org.opensearch.action.StepListener;
 import org.opensearch.cluster.metadata.IndexMetadata;
 import org.opensearch.cluster.metadata.MappingMetadata;
@@ -191,7 +193,8 @@ void recoverFromLocalShards(
                 // just trigger a merge to do housekeeping on the
                 // copied segments - we will also see them in stats etc.
                 indexShard.getEngine().forceMerge(false, -1, false, false, false, UUIDs.randomBase64UUID());
-                if (indexShard.isRemoteTranslogEnabled()) {
+                if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) {
+                    waitForRemoteStoreSync(indexShard);
                     if (indexShard.isRemoteSegmentStoreInSync() == false) {
                         throw new IndexShardRecoveryException(
                             indexShard.shardId(),
@@ -432,7 +435,8 @@ void recoverFromSnapshotAndRemoteStore(
             }
             indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm());
             indexShard.finalizeRecovery();
-            if (indexShard.isRemoteTranslogEnabled()) {
+            if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) {
+                waitForRemoteStoreSync(indexShard);
                 if (indexShard.isRemoteSegmentStoreInSync() == false) {
                     listener.onFailure(new IndexShardRestoreFailedException(shardId, "Failed to upload to remote segment store"));
                     return;
@@ -717,7 +721,8 @@ private void restore(
             }
             indexShard.getEngine().fillSeqNoGaps(indexShard.getPendingPrimaryTerm());
             indexShard.finalizeRecovery();
-            if (indexShard.isRemoteTranslogEnabled()) {
+            if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) {
+                waitForRemoteStoreSync(indexShard);
                 if (indexShard.isRemoteSegmentStoreInSync() == false) {
                     listener.onFailure(new IndexShardRestoreFailedException(shardId, "Failed to upload to remote segment store"));
                     return;
@@ -791,4 +796,31 @@ private void bootstrap(final IndexShard indexShard, final Store store) throws IO
         );
         store.associateIndexWithNewTranslog(translogUUID);
     }
+
+    /*
+     Blocks the calling thread, waiting for the remote store to get synced, until the internal remote upload timeout elapses
+     */
+    private void waitForRemoteStoreSync(IndexShard indexShard) {
+        if (indexShard.shardRouting.primary() == false) {
+            return;
+        }
+        long startNanos = System.nanoTime();
+
+        while (System.nanoTime() - startNanos < indexShard.getRecoverySettings().internalRemoteUploadTimeout().nanos()) {
+            try {
+                if (indexShard.isRemoteSegmentStoreInSync()) {
+                    break;
+                } else {
+                    try {
+                        Thread.sleep(TimeValue.timeValueMinutes(1).seconds());
+                    } catch (InterruptedException ie) {
+                        throw new OpenSearchException("Interrupted waiting for completion of [{}]", ie);
+                    }
+                }
+            } catch (AlreadyClosedException e) {
+                // There is no point in waiting as the shard is now closed.
+                return;
+            }
+        }
+    }
 }
diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java b/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java
index 5351ae7fe08dd..2b41eb125d808 100644
--- a/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java
+++ b/server/src/main/java/org/opensearch/indices/recovery/RecoverySettings.java
@@ -46,6 +46,8 @@
 import org.opensearch.core.common.unit.ByteSizeUnit;
 import org.opensearch.core.common.unit.ByteSizeValue;
 
+import java.util.concurrent.TimeUnit;
+
 /**
  * Settings for the recovery mechanism
  *
@@ -176,6 +178,13 @@ public class RecoverySettings {
         Property.Dynamic
     );
 
+    public static final Setting<TimeValue> INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT = Setting.timeSetting(
+        "indices.recovery.internal_remote_upload_timeout",
+        new TimeValue(1, TimeUnit.HOURS),
+        Property.Dynamic,
+        Property.NodeScope
+    );
+
     // choose 512KB-16B to ensure that the resulting byte[] is not a humongous allocation in G1.
     public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512 * 1024 - 16, ByteSizeUnit.BYTES);
 
@@ -193,6 +202,7 @@ public class RecoverySettings {
     private volatile int minRemoteSegmentMetadataFiles;
 
     private volatile ByteSizeValue chunkSize = DEFAULT_CHUNK_SIZE;
+    private volatile TimeValue internalRemoteUploadTimeout;
 
     public RecoverySettings(Settings settings, ClusterSettings clusterSettings) {
         this.retryDelayStateSync = INDICES_RECOVERY_RETRY_DELAY_STATE_SYNC_SETTING.get(settings);
@@ -216,6 +226,7 @@ public RecoverySettings(Settings settings, ClusterSettings clusterSettings) {
         }
         logger.debug("using max_bytes_per_sec[{}]", maxBytesPerSec);
 
+        this.internalRemoteUploadTimeout = INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT.get(settings);
         clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_BYTES_PER_SEC_SETTING, this::setMaxBytesPerSec);
         clusterSettings.addSettingsUpdateConsumer(INDICES_RECOVERY_MAX_CONCURRENT_FILE_CHUNKS_SETTING, this::setMaxConcurrentFileChunks);
@@ -237,6 +248,8 @@ public RecoverySettings(Settings settings, ClusterSettings clusterSettings) {
             CLUSTER_REMOTE_INDEX_SEGMENT_METADATA_RETENTION_MAX_COUNT_SETTING,
             this::setMinRemoteSegmentMetadataFiles
         );
+        clusterSettings.addSettingsUpdateConsumer(INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT, this::setInternalRemoteUploadTimeout);
+
     }
 
     public RateLimiter rateLimiter() {
@@ -267,6 +280,10 @@ public TimeValue internalActionLongTimeout() {
         return internalActionLongTimeout;
     }
 
+    public TimeValue internalRemoteUploadTimeout() {
+        return internalRemoteUploadTimeout;
+    }
+
     public ByteSizeValue getChunkSize() {
         return chunkSize;
     }
@@ -298,6 +315,10 @@ public void setInternalActionLongTimeout(TimeValue internalActionLongTimeout) {
         this.internalActionLongTimeout = internalActionLongTimeout;
     }
 
+    public void setInternalRemoteUploadTimeout(TimeValue internalRemoteUploadTimeout) {
+        this.internalRemoteUploadTimeout = internalRemoteUploadTimeout;
+    }
+
     private void setMaxBytesPerSec(ByteSizeValue maxBytesPerSec) {
         this.maxBytesPerSec = maxBytesPerSec;
         if (maxBytesPerSec.getBytes() <= 0) {
diff --git a/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java b/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java
index cc4d3c006d84c..afb6e530b0eec 100644
--- a/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java
+++ b/server/src/main/java/org/opensearch/repositories/RepositoriesModule.java
@@ -39,6 +39,7 @@
 import org.opensearch.indices.recovery.RecoverySettings;
 import org.opensearch.plugins.RepositoryPlugin;
 import org.opensearch.repositories.fs.FsRepository;
+import org.opensearch.repositories.fs.ReloadableFsRepository;
 import org.opensearch.threadpool.ThreadPool;
 import org.opensearch.transport.TransportService;
 
@@ -71,6 +72,11 @@ public RepositoriesModule(
             metadata -> new FsRepository(metadata, env, namedXContentRegistry, clusterService, recoverySettings)
         );
 
+        factories.put(
+            ReloadableFsRepository.TYPE,
+            metadata -> new ReloadableFsRepository(metadata, env, namedXContentRegistry, clusterService, recoverySettings)
+        );
+
         for (RepositoryPlugin repoPlugin : repoPlugins) {
             Map<String, Repository.Factory> newRepoTypes = repoPlugin.getRepositories(
                 env,
diff --git a/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java b/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java
index c06c805a39396..e8020a432a58a 100644
--- a/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java
+++ b/server/src/main/java/org/opensearch/repositories/fs/ReloadableFsRepository.java
@@ -8,18 +8,52 @@
 
 package org.opensearch.repositories.fs;
 
+import org.opensearch.OpenSearchException;
 import org.opensearch.cluster.metadata.RepositoryMetadata;
 import org.opensearch.cluster.service.ClusterService;
+import org.opensearch.common.Randomness;
+import org.opensearch.common.blobstore.BlobContainer;
+import org.opensearch.common.blobstore.BlobPath;
+import org.opensearch.common.blobstore.BlobStore;
+import org.opensearch.common.blobstore.fs.FsBlobContainer;
+import org.opensearch.common.blobstore.fs.FsBlobStore;
+import org.opensearch.common.settings.Setting;
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.env.Environment;
 import org.opensearch.indices.recovery.RecoverySettings;
 
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.util.Random;
+
 /**
- * Extension of {@link FsRepository} that can be reloaded inplace
+ * Extension of {@link FsRepository} that can be reloaded in-place; it supports failing operations and slowing them down
 *
 * @opensearch.internal
 */
 public class ReloadableFsRepository extends FsRepository {
+    public static final String TYPE = "reloadable-fs";
+
+    private final FailSwitch fail;
+    private final SlowDownWriteSwitch slowDown;
+
+    public static final Setting<Integer> REPOSITORIES_FAILRATE_SETTING = Setting.intSetting(
+        "repositories.fail.rate",
+        0,
+        0,
+        100,
+        Setting.Property.NodeScope
+    );
+
+    public static final Setting<Integer> REPOSITORIES_SLOWDOWN_SETTING = Setting.intSetting(
+        "repositories.slowdown",
+        0,
+        0,
+        100,
+        Setting.Property.NodeScope
+    );
+
    /**
     * Constructs a shared file system repository that is reloadable in-place.
*/ @@ -31,6 +65,11 @@ public ReloadableFsRepository( RecoverySettings recoverySettings ) { super(metadata, environment, namedXContentRegistry, clusterService, recoverySettings); + fail = new FailSwitch(); + fail.failRate(REPOSITORIES_FAILRATE_SETTING.get(metadata.settings())); + slowDown = new SlowDownWriteSwitch(); + slowDown.setSleepSeconds(REPOSITORIES_SLOWDOWN_SETTING.get(metadata.settings())); + readRepositoryMetadata(); } @Override @@ -40,12 +79,124 @@ public boolean isReloadable() { @Override public void reload(RepositoryMetadata repositoryMetadata) { - if (isReloadable() == false) { - return; - } - super.reload(repositoryMetadata); + readRepositoryMetadata(); validateLocation(); readMetadata(); } + + private void readRepositoryMetadata() { + fail.failRate(REPOSITORIES_FAILRATE_SETTING.get(metadata.settings())); + slowDown.setSleepSeconds(REPOSITORIES_SLOWDOWN_SETTING.get(metadata.settings())); + } + + protected BlobStore createBlobStore() throws Exception { + final String location = REPOSITORIES_LOCATION_SETTING.get(getMetadata().settings()); + final Path locationFile = environment.resolveRepoFile(location); + return new ThrowingBlobStore(bufferSize, locationFile, isReadOnly(), fail, slowDown); + } + + // A random integer from min-max (inclusive). + public static int randomIntBetween(int min, int max) { + Random random = Randomness.get(); + return random.nextInt(max - min + 1) + min; + } + + static class FailSwitch { + private volatile int failRate; + private volatile boolean onceFailedFailAlways = false; + + public boolean fail() { + final int rnd = randomIntBetween(1, 100); + boolean fail = rnd <= failRate; + if (fail && onceFailedFailAlways) { + failAlways(); + } + return fail; + } + + public void failAlways() { + failRate = 100; + } + + public void failRate(int rate) { + failRate = rate; + } + + public void onceFailedFailAlways() { + onceFailedFailAlways = true; + } + } + + static class SlowDownWriteSwitch { + private volatile int sleepSeconds; + + public void setSleepSeconds(int sleepSeconds) { + this.sleepSeconds = sleepSeconds; + } + + public int getSleepSeconds() { + return sleepSeconds; + } + } + + private static class ThrowingBlobStore extends FsBlobStore { + + private final FailSwitch fail; + private final SlowDownWriteSwitch slowDown; + + public ThrowingBlobStore(int bufferSizeInBytes, Path path, boolean readonly, FailSwitch fail, SlowDownWriteSwitch slowDown) + throws IOException { + super(bufferSizeInBytes, path, readonly); + this.fail = fail; + this.slowDown = slowDown; + } + + @Override + public BlobContainer blobContainer(BlobPath path) { + try { + return new ThrowingBlobContainer(this, path, buildAndCreate(path), fail, slowDown); + } catch (IOException ex) { + throw new OpenSearchException("failed to create blob container", ex); + } + } + } + + private static class ThrowingBlobContainer extends FsBlobContainer { + + private final FailSwitch fail; + private final SlowDownWriteSwitch slowDown; + + public ThrowingBlobContainer(FsBlobStore blobStore, BlobPath blobPath, Path path, FailSwitch fail, SlowDownWriteSwitch slowDown) { + super(blobStore, blobPath, path); + this.fail = fail; + this.slowDown = slowDown; + } + + @Override + public void writeBlobAtomic(final String blobName, final InputStream inputStream, final long blobSize, boolean failIfAlreadyExists) + throws IOException { + checkFailRateAndSleep(blobName); + super.writeBlobAtomic(blobName, inputStream, blobSize, failIfAlreadyExists); + } + + private void checkFailRateAndSleep(String blobName) throws 
IOException {
+            if (fail.fail() && blobName.contains(".dat") == false) {
+                throw new IOException("blob container throwing error");
+            }
+            if (slowDown.getSleepSeconds() > 0) {
+                try {
+                    Thread.sleep(slowDown.getSleepSeconds() * 1000L);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+
+        @Override
+        public void writeBlob(String blobName, InputStream inputStream, long blobSize, boolean failIfAlreadyExists) throws IOException {
+            checkFailRateAndSleep(blobName);
+            super.writeBlob(blobName, inputStream, blobSize, failIfAlreadyExists);
+        }
+    }
 }
diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
index 6567cb03f3dc6..acec50f28860e 100644
--- a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
+++ b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java
@@ -335,6 +335,7 @@ public void testRefreshSuccessOnFirstAttempt() throws Exception {
         RemoteStoreStatsTrackerFactory trackerFactory = tuple.v2();
         RemoteSegmentTransferTracker segmentTracker = trackerFactory.getRemoteSegmentTransferTracker(indexShard.shardId());
         assertNoLagAndTotalUploadsFailed(segmentTracker, 0);
+        assertTrue("remote store in sync", tuple.v1().isRemoteSegmentStoreInSync());
     }
 
     public void testRefreshSuccessOnSecondAttempt() throws Exception {
@@ -404,6 +405,20 @@ public void testRefreshSuccessOnThirdAttempt() throws Exception {
         assertNoLagAndTotalUploadsFailed(segmentTracker, 2);
     }
 
+    public void testRefreshPersistentFailure() throws Exception {
+        int succeedOnAttempt = 10;
+        CountDownLatch refreshCountLatch = new CountDownLatch(1);
+        CountDownLatch successLatch = new CountDownLatch(10);
+        Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> tuple = mockIndexShardWithRetryAndScheduleRefresh(
+            succeedOnAttempt,
+            refreshCountLatch,
+            successLatch
+        );
+        // Giving 10ms for some iterations of remote refresh upload
+        Thread.sleep(10);
+        assertFalse("remote store should not be in sync", tuple.v1().isRemoteSegmentStoreInSync());
+    }
+
     private void assertNoLagAndTotalUploadsFailed(RemoteSegmentTransferTracker segmentTracker, long totalUploadsFailed) throws Exception {
         assertBusy(() -> {
             assertEquals(0, segmentTracker.getBytesLag());
@@ -568,6 +583,7 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
         // Mock indexShard.getSegmentInfosSnapshot()
         doAnswer(invocation -> {
             if (counter.incrementAndGet() <= succeedOnAttempt) {
+                logger.error("Failing in get segment info {}", counter.get());
                 throw new RuntimeException("Inducing failure in upload");
             }
             return indexShard.getSegmentInfosSnapshot();
@@ -583,6 +599,7 @@ private Tuple<RemoteStoreRefreshListener, RemoteStoreStatsTrackerFactory> mockIn
         doAnswer(invocation -> {
             if (Objects.nonNull(successLatch)) {
                 successLatch.countDown();
+                logger.info("Value of latch {}", successLatch.getCount());
             }
             return indexShard.getEngine();
         }).when(shard).getEngine();
@@ -642,6 +659,31 @@ private void verifyUploadedSegments(RemoteSegmentStoreDirectory remoteSegmentSto
                 }
             }
         }
+        assertTrue(remoteStoreRefreshListener.isRemoteSegmentStoreInSync());
+    }
+
+    public void testRemoteSegmentStoreNotInSync() throws IOException {
+        setup(true, 3);
+        remoteStoreRefreshListener.afterRefresh(true);
+        try (Store remoteStore = indexShard.remoteStore()) {
+            RemoteSegmentStoreDirectory remoteSegmentStoreDirectory =
+                (RemoteSegmentStoreDirectory) ((FilterDirectory)
((FilterDirectory) remoteStore.directory()).getDelegate()).getDelegate(); + verifyUploadedSegments(remoteSegmentStoreDirectory); + remoteStoreRefreshListener.isRemoteSegmentStoreInSync(); + boolean oneFileDeleted = false; + // Delete any one file from remote store + try (GatedCloseable<SegmentInfos> segmentInfosGatedCloseable = indexShard.getSegmentInfosSnapshot()) { + SegmentInfos segmentInfos = segmentInfosGatedCloseable.get(); + for (String file : segmentInfos.files(true)) { + if (oneFileDeleted == false && RemoteStoreRefreshListener.EXCLUDE_FILES.contains(file) == false) { + remoteSegmentStoreDirectory.deleteFile(file); + oneFileDeleted = true; + break; + } + } + } + assertFalse(remoteStoreRefreshListener.isRemoteSegmentStoreInSync()); + } } } From 6b2c2f2d3e6bc2257376257d85b3d950556dbc91 Mon Sep 17 00:00:00 2001 From: Sachin Kale <sachinpkale@gmail.com> Date: Mon, 5 Feb 2024 12:19:39 +0530 Subject: [PATCH 166/178] Add drainRefreshes as part of tearDown in RemoteStoreRefreshListenerTests (#12157) Signed-off-by: Sachin Kale <kalsac@amazon.com> Co-authored-by: Sachin Kale <kalsac@amazon.com> --- .../index/shard/RemoteStoreRefreshListenerTests.java | 11 +++++++++++ .../opensearch/index/shard/IndexShardTestCase.java | 6 +++--- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java index acec50f28860e..85878cc2e1c9d 100644 --- a/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java +++ b/server/src/test/java/org/opensearch/index/shard/RemoteStoreRefreshListenerTests.java @@ -10,6 +10,7 @@ import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.tests.store.BaseDirectoryWrapper; @@ -102,6 +103,16 @@ private void indexDocs(int startDocId, int numberOfDocs) throws IOException { public void tearDown() throws Exception { Directory storeDirectory = ((FilterDirectory) ((FilterDirectory) indexShard.store().directory()).getDelegate()).getDelegate(); ((BaseDirectoryWrapper) storeDirectory).setCheckIndexOnClose(false); + + for (ReferenceManager.RefreshListener refreshListener : indexShard.getEngine().config().getInternalRefreshListener()) { + if (refreshListener instanceof ReleasableRetryableRefreshListener) { + ((ReleasableRetryableRefreshListener) refreshListener).drainRefreshes(); + } + } + if (remoteStoreRefreshListener != null) { + remoteStoreRefreshListener.drainRefreshes(); + } + closeShards(indexShard); super.tearDown(); } diff --git a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java index 412d5235fe462..bf1c4d4c94e04 100644 --- a/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/shard/IndexShardTestCase.java @@ -785,10 +785,10 @@ protected Store createRemoteStore(Path path, ShardRouting shardRouting, IndexMet protected RemoteSegmentStoreDirectory createRemoteSegmentStoreDirectory(ShardId shardId, Path path) throws IOException { NodeEnvironment.NodePath remoteNodePath = new NodeEnvironment.NodePath(path); ShardPath remoteShardPath = new ShardPath(false, remoteNodePath.resolve(shardId), 
remoteNodePath.resolve(shardId), shardId); - RemoteDirectory dataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex()); - RemoteDirectory metadataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex()); + RemoteDirectory dataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex().resolve("data")); + RemoteDirectory metadataDirectory = newRemoteDirectory(remoteShardPath.resolveIndex().resolve("metadata")); RemoteStoreLockManager remoteStoreLockManager = new RemoteStoreMetadataLockManager( - new RemoteBufferedOutputDirectory(getBlobContainer(remoteShardPath.resolveIndex())) + new RemoteBufferedOutputDirectory(getBlobContainer(remoteShardPath.resolveIndex().resolve("lock_files"))) ); return new RemoteSegmentStoreDirectory(dataDirectory, metadataDirectory, remoteStoreLockManager, threadPool, shardId); } From 985c4118bbc13a775d6c6c31a789290857766718 Mon Sep 17 00:00:00 2001 From: Andriy Redko <andriy.redko@aiven.io> Date: Mon, 5 Feb 2024 08:03:14 -0500 Subject: [PATCH 167/178] Update to Gradle 8.6 (#11757) Signed-off-by: Andriy Redko <andriy.redko@aiven.io> --- gradle/wrapper/gradle-wrapper.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index f1d76d80bbfa3..82a4add334a7d 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -11,7 +11,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.6-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c16d517b50dd28b3f5838f0e844b7520b8f1eb610f2f29de7e4e04a1b7c9c79b +distributionSha256Sum=85719317abd2112f021d4f41f09ec370534ba288432065f4b477b6a3b652910d From 0ef318fbbcdab827bf75aa5e59c7beaf29aaa01c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 08:18:20 -0600 Subject: [PATCH 168/178] Bump actions/github-script from 6.4.0 to 7.0.1 (#12166) * Bump actions/github-script from 6.4.0 to 7.0.1 Bumps [actions/github-script](https://github.com/actions/github-script) from 6.4.0 to 7.0.1. - [Release notes](https://github.com/actions/github-script/releases) - [Commits](https://github.com/actions/github-script/compare/v6.4.0...v7.0.1) --- updated-dependencies: - dependency-name: actions/github-script dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- .github/workflows/maintainer-approval.yml | 2 +- .github/workflows/triage.yml | 2 +- .github/workflows/version.yml | 2 +- CHANGELOG.md | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/maintainer-approval.yml b/.github/workflows/maintainer-approval.yml index 34e8f57cc1878..fdc2bf16937b4 100644 --- a/.github/workflows/maintainer-approval.yml +++ b/.github/workflows/maintainer-approval.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest steps: - id: find-maintainers - uses: actions/github-script@v7 + uses: actions/github-script@v7.0.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} result-encoding: string diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml index c305818bdb0a9..83bf4926a8c2d 100644 --- a/.github/workflows/triage.yml +++ b/.github/workflows/triage.yml @@ -9,7 +9,7 @@ jobs: if: github.repository == 'opensearch-project/OpenSearch' runs-on: ubuntu-latest steps: - - uses: actions/github-script@v7 + - uses: actions/github-script@v7.0.1 with: script: | const { issue, repository } = context.payload; diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml index f4adef1ff06b0..be2a89ac931e9 100644 --- a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -129,7 +129,7 @@ jobs: - name: Create tracking issue id: create-issue - uses: actions/github-script@v6.4.0 + uses: actions/github-script@v7.0.1 with: script: | const body = ` diff --git a/CHANGELOG.md b/CHANGELOG.md index 263ec57e856cd..e1bb202782a60 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -153,7 +153,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `org.apache.logging.log4j:log4j-core` from 2.20.0 to 2.22.1 ([#10858](https://github.com/opensearch-project/OpenSearch/pull/10858), [#11000](https://github.com/opensearch-project/OpenSearch/pull/11000), [#11270](https://github.com/opensearch-project/OpenSearch/pull/11270), [#11695](https://github.com/opensearch-project/OpenSearch/pull/11695)) - Bump `aws-actions/configure-aws-credentials` from 2 to 4 ([#10504](https://github.com/opensearch-project/OpenSearch/pull/10504)) - Bump `stefanzweifel/git-auto-commit-action` from 4 to 5 ([#11171](https://github.com/opensearch-project/OpenSearch/pull/11171)) -- Bump `actions/github-script` from 6 to 7 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271)) +- Bump `actions/github-script` from 6 to 7.0.1 ([#11271](https://github.com/opensearch-project/OpenSearch/pull/11271), [#12166](https://github.com/opensearch-project/OpenSearch/pull/12166)) - Bump `jackson` and `jackson_databind` from 2.15.2 to 2.16.0 ([#11273](https://github.com/opensearch-project/OpenSearch/pull/11273)) - Bump `netty` from 4.1.100.Final to 4.1.106.Final ([#11294](https://github.com/opensearch-project/OpenSearch/pull/11294), [#11775](https://github.com/opensearch-project/OpenSearch/pull/11775)), [#12034](https://github.com/opensearch-project/OpenSearch/pull/12034)) - Bump `com.avast.gradle:gradle-docker-compose-plugin` from 0.16.12 to 0.17.6 ([#10163](https://github.com/opensearch-project/OpenSearch/pull/10163), [#11692](https://github.com/opensearch-project/OpenSearch/pull/11692)) 
From aad26307eca6edb8c819cad4af55935d5114ad50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:00:49 -0500 Subject: [PATCH 169/178] Bump com.netflix.nebula.ospackage-base from 11.6.0 to 11.8.0 in /distribution/packages (#12167) * Bump com.netflix.nebula.ospackage-base in /distribution/packages Bumps com.netflix.nebula.ospackage-base from 11.6.0 to 11.8.0. --- updated-dependencies: - dependency-name: com.netflix.nebula.ospackage-base dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 2 +- distribution/packages/build.gradle | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e1bb202782a60..efa219afc806b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -143,7 +143,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump Lucene from 9.7.0 to 9.8.0 ([10276](https://github.com/opensearch-project/OpenSearch/pull/10276)) - Bump `commons-io:commons-io` from 2.13.0 to 2.15.1 ([#10294](https://github.com/opensearch-project/OpenSearch/pull/10294), [#11001](https://github.com/opensearch-project/OpenSearch/pull/11001), [#11002](https://github.com/opensearch-project/OpenSearch/pull/11002), [#11446](https://github.com/opensearch-project/OpenSearch/pull/11446), [#11554](https://github.com/opensearch-project/OpenSearch/pull/11554), [#11560](https://github.com/opensearch-project/OpenSearch/pull/11560), [#11796](https://github.com/opensearch-project/OpenSearch/pull/11796)) - Bump `com.google.api.grpc:proto-google-common-protos` from 2.10.0 to 2.25.1 ([#10208](https://github.com/opensearch-project/OpenSearch/pull/10208), [#10298](https://github.com/opensearch-project/OpenSearch/pull/10298)) -- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.6.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630)) +- Bump `com.netflix.nebula.ospackage-base` from 11.4.0 to 11.8.0 ([#10295](https://github.com/opensearch-project/OpenSearch/pull/10295), [#11630](https://github.com/opensearch-project/OpenSearch/pull/11630), [#12167](https://github.com/opensearch-project/OpenSearch/pull/12167)) - Bump `org.apache.zookeeper:zookeeper` from 3.9.0 to 3.9.1 ([#10506](https://github.com/opensearch-project/OpenSearch/pull/10506)) - Bump `de.thetaphi:forbiddenapis` from 3.5.1 to 3.6 ([#10508](https://github.com/opensearch-project/OpenSearch/pull/10508)) - Bump `org.codehaus.woodstox:stax2-api` from 4.2.1 to 4.2.2 ([#10639](https://github.com/opensearch-project/OpenSearch/pull/10639)) diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index ededa7bff34d8..43c38c5ad0c67 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -63,7 +63,7 @@ import java.util.regex.Pattern */ plugins { - id "com.netflix.nebula.ospackage-base" version "11.6.0" + id "com.netflix.nebula.ospackage-base" version "11.8.0" } void addProcessFilesTask(String type, boolean jdk) { From 0a88963014729466cc91f83f80be4843a4ab2aab Mon Sep 17 00:00:00 2001 From: 
Mohit Godwani <81609427+mgodwan@users.noreply.github.com> Date: Mon, 5 Feb 2024 23:27:53 +0530 Subject: [PATCH 170/178] Enable Fuzzy codec for doc id fields using a bloom filter (#11027) * Enable Fuzzy codec for doc id fields using a bloom filter Signed-off-by: mgodwan <mgodwan@amazon.com> --- CHANGELOG.md | 1 + .../fuzzy/FilterConstructionBenchmark.java | 67 +++ .../codec/fuzzy/FilterLookupBenchmark.java | 80 +++ qa/rolling-upgrade/build.gradle | 1 + .../org/opensearch/upgrades/IndexingIT.java | 88 +++- .../common/settings/FeatureFlagSettings.java | 3 +- .../common/settings/IndexScopedSettings.java | 3 + .../opensearch/common/util/FeatureFlags.java | 7 + .../org/opensearch/index/IndexSettings.java | 72 +++ .../PerFieldMappingPostingFormatCodec.java | 19 + .../index/codec/fuzzy/AbstractFuzzySet.java | 61 +++ .../index/codec/fuzzy/BloomFilter.java | 150 ++++++ .../fuzzy/FuzzyFilterPostingsFormat.java | 492 ++++++++++++++++++ .../index/codec/fuzzy/FuzzySet.java | 98 ++++ .../index/codec/fuzzy/FuzzySetFactory.java | 49 ++ .../index/codec/fuzzy/FuzzySetParameters.java | 34 ++ .../fuzzy/IndexInputImmutableLongArray.java | 70 +++ .../codec/fuzzy/LongArrayBackedBitSet.java | 105 ++++ .../index/codec/fuzzy/package-info.java | 10 + .../index/engine/SegmentsStats.java | 4 +- .../org.apache.lucene.codecs.PostingsFormat | 1 + .../index/codec/fuzzy/BloomFilterTests.java | 82 +++ .../fuzzy/FuzzyFilterPostingsFormatTests.java | 34 ++ .../test/OpenSearchIntegTestCase.java | 10 + 24 files changed, 1538 insertions(+), 3 deletions(-) create mode 100644 benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java create mode 100644 benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/AbstractFuzzySet.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/BloomFilter.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormat.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySet.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetFactory.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetParameters.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/IndexInputImmutableLongArray.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/LongArrayBackedBitSet.java create mode 100644 server/src/main/java/org/opensearch/index/codec/fuzzy/package-info.java create mode 100644 server/src/test/java/org/opensearch/index/codec/fuzzy/BloomFilterTests.java create mode 100644 server/src/test/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormatTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index efa219afc806b..18d0e431a4b5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -137,6 +137,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465)) - Add support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394)) - Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074)) +- Enable Fuzzy codec for doc id fields using a bloom filter 
([#11022](https://github.com/opensearch-project/OpenSearch/pull/11022)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java new file mode 100644 index 0000000000000..4e995f5a5067c --- /dev/null +++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterConstructionBenchmark.java @@ -0,0 +1,67 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.benchmark.index.codec.fuzzy; + +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.UUIDs; +import org.opensearch.index.codec.fuzzy.FuzzySet; +import org.opensearch.index.codec.fuzzy.FuzzySetFactory; +import org.opensearch.index.codec.fuzzy.FuzzySetParameters; +import org.opensearch.index.mapper.IdFieldMapper; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +@Fork(3) +@Warmup(iterations = 2) +@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +public class FilterConstructionBenchmark { + + private List<BytesRef> items; + + @Param({ "1000000", "10000000", "50000000" }) + private int numIds; + + @Param({ "0.0511", "0.1023", "0.2047" }) + private double fpp; + + private FuzzySetFactory fuzzySetFactory; + private String fieldName; + + @Setup + public void setupIds() { + this.fieldName = IdFieldMapper.NAME; + this.items = IntStream.range(0, numIds).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList()); + FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp); + this.fuzzySetFactory = new FuzzySetFactory(Map.of(fieldName, parameters)); + } + + @Benchmark + public FuzzySet buildFilter() throws IOException { + return fuzzySetFactory.createFuzzySet(items.size(), fieldName, () -> items.iterator()); + } +} diff --git a/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java new file mode 100644 index 0000000000000..383539219830e --- /dev/null +++ b/benchmarks/src/main/java/org/opensearch/benchmark/index/codec/fuzzy/FilterLookupBenchmark.java @@ -0,0 +1,80 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.benchmark.index.codec.fuzzy; + +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.UUIDs; +import org.opensearch.index.codec.fuzzy.FuzzySet; +import org.opensearch.index.codec.fuzzy.FuzzySetFactory; +import org.opensearch.index.codec.fuzzy.FuzzySetParameters; +import org.opensearch.index.mapper.IdFieldMapper; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +@Fork(3) +@Warmup(iterations = 2) +@Measurement(iterations = 5, time = 60, timeUnit = TimeUnit.SECONDS) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +@State(Scope.Benchmark) +public class FilterLookupBenchmark { + + @Param({ "50000000", "1000000" }) + private int numItems; + + @Param({ "1000000" }) + private int searchKeyCount; + + @Param({ "0.0511", "0.1023", "0.2047" }) + private double fpp; + + private FuzzySet fuzzySet; + private List<BytesRef> items; + private Random random = new Random(); + + @Setup + public void setupFilter() throws IOException { + String fieldName = IdFieldMapper.NAME; + items = IntStream.range(0, numItems).mapToObj(i -> new BytesRef(UUIDs.base64UUID())).collect(Collectors.toList()); + FuzzySetParameters parameters = new FuzzySetParameters(() -> fpp); + fuzzySet = new FuzzySetFactory(Map.of(fieldName, parameters)).createFuzzySet(numItems, fieldName, () -> items.iterator()); + } + + @Benchmark + public void contains_withExistingKeys(Blackhole blackhole) throws IOException { + for (int i = 0; i < searchKeyCount; i++) { + blackhole.consume(fuzzySet.contains(items.get(random.nextInt(items.size()))) == FuzzySet.Result.MAYBE); + } + } + + @Benchmark + public void contains_withRandomKeys(Blackhole blackhole) throws IOException { + for (int i = 0; i < searchKeyCount; i++) { + blackhole.consume(fuzzySet.contains(new BytesRef(UUIDs.base64UUID()))); + } + } +} diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 3dff452be855f..777377f04e8b9 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -62,6 +62,7 @@ for (Version bwcVersion : BuildParams.bwcVersions.wireCompatible) { setting 'repositories.url.allowed_urls', 'http://snapshot.test*' setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'http.content_type.required', 'true' + systemProperty 'opensearch.experimental.optimize_doc_id_lookup.fuzzy_set.enabled', 'true' } } diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index 1577260e145d4..08a5e92fc2d02 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -40,10 +40,10 @@ import org.opensearch.common.Booleans; import 
org.opensearch.common.io.Streams; import org.opensearch.common.settings.Settings; +import org.opensearch.index.IndexSettings; import org.opensearch.index.codec.CodecService; import org.opensearch.index.engine.EngineConfig; import org.opensearch.indices.replication.common.ReplicationType; -import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.rest.yaml.ObjectPath; import java.io.IOException; @@ -344,6 +344,92 @@ public void testIndexingWithSegRep() throws Exception { } } + public void testIndexingWithFuzzyFilterPostings() throws Exception { + if (UPGRADE_FROM_VERSION.onOrBefore(Version.V_2_11_1)) { + logger.info("--> Skip test for version {} where fuzzy filter postings format feature is not available", UPGRADE_FROM_VERSION); + return; + } + final String indexName = "test-index-fuzzy-set"; + final int shardCount = 3; + final int replicaCount = 1; + logger.info("--> Case {}", CLUSTER_TYPE); + printClusterNodes(); + logger.info("--> _cat/shards before test execution \n{}", EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/shards?v")).getEntity())); + switch (CLUSTER_TYPE) { + case OLD: + Settings.Builder settings = Settings.builder() + .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), shardCount) + .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), replicaCount) + .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) + .put( + EngineConfig.INDEX_CODEC_SETTING.getKey(), + randomFrom(new ArrayList<>(CODECS) { + { + add(CodecService.LUCENE_DEFAULT_CODEC); + } + }) + ) + .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms"); + createIndex(indexName, settings.build()); + waitForClusterHealthWithNoShardMigration(indexName, "green"); + bulk(indexName, "_OLD", 5); + break; + case MIXED: + waitForClusterHealthWithNoShardMigration(indexName, "yellow"); + break; + case UPGRADED: + Settings.Builder settingsBuilder = Settings.builder() + .put(IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING.getKey(), true); + updateIndexSettings(indexName, settingsBuilder); + waitForClusterHealthWithNoShardMigration(indexName, "green"); + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + + int expectedCount; + switch (CLUSTER_TYPE) { + case OLD: + expectedCount = 5; + break; + case MIXED: + if (Booleans.parseBoolean(System.getProperty("tests.first_round"))) { + expectedCount = 5; + } else { + expectedCount = 10; + } + break; + case UPGRADED: + expectedCount = 15; + break; + default: + throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); + } + + waitForSearchableDocs(indexName, shardCount, replicaCount); + assertCount(indexName, expectedCount); + + if (CLUSTER_TYPE != ClusterType.OLD) { + bulk(indexName, "_" + CLUSTER_TYPE, 5); + logger.info("--> Index one doc (to be deleted next) and verify doc count"); + Request toBeDeleted = new Request("PUT", "/" + indexName + "/_doc/to_be_deleted"); + toBeDeleted.addParameter("refresh", "true"); + toBeDeleted.setJsonEntity("{\"f1\": \"delete-me\"}"); + client().performRequest(toBeDeleted); + waitForSearchableDocs(indexName, shardCount, replicaCount); + assertCount(indexName, expectedCount + 6); + + logger.info("--> Delete previously added doc and verify doc count"); + Request delete = new Request("DELETE", "/" + indexName + "/_doc/to_be_deleted"); + delete.addParameter("refresh", "true"); + client().performRequest(delete); + waitForSearchableDocs(indexName, shardCount, replicaCount); + 
assertCount(indexName, expectedCount + 5); + + //forceMergeAndVerify(indexName, shardCount * (1 + replicaCount)); + } + } + public void testAutoIdWithOpTypeCreate() throws IOException { final String indexName = "auto_id_and_op_type_create_index"; StringBuilder b = new StringBuilder(); diff --git a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java index 44dc4161f093a..e6d7ba0c60772 100644 --- a/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/FeatureFlagSettings.java @@ -34,6 +34,7 @@ protected FeatureFlagSettings( FeatureFlags.IDENTITY_SETTING, FeatureFlags.TELEMETRY_SETTING, FeatureFlags.DATETIME_FORMATTER_CACHING_SETTING, - FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING + FeatureFlags.WRITEABLE_REMOTE_INDEX_SETTING, + FeatureFlags.DOC_ID_FUZZY_SET_SETTING ); } diff --git a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java index 97d6a0ddf02c8..cf502fe685fcf 100644 --- a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java @@ -229,6 +229,9 @@ public final class IndexScopedSettings extends AbstractScopedSettings { IndexMetadata.INDEX_REMOTE_SEGMENT_STORE_REPOSITORY_SETTING, IndexMetadata.INDEX_REMOTE_TRANSLOG_REPOSITORY_SETTING, + IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING, + IndexSettings.INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING, + // Settings for concurrent segment search IndexSettings.INDEX_CONCURRENT_SEGMENT_SEARCH_SETTING, diff --git a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java index c88a795501ca6..075dc9934e130 100644 --- a/server/src/main/java/org/opensearch/common/util/FeatureFlags.java +++ b/server/src/main/java/org/opensearch/common/util/FeatureFlags.java @@ -54,6 +54,11 @@ public class FeatureFlags { */ public static final String WRITEABLE_REMOTE_INDEX = "opensearch.experimental.feature.writeable_remote_index.enabled"; + /** + * Gates the optimization to enable bloom filters for doc id lookup. + */ + public static final String DOC_ID_FUZZY_SET = "opensearch.experimental.optimize_doc_id_lookup.fuzzy_set.enabled"; + /** * Should store the settings from opensearch.yml. 
*/ @@ -110,4 +115,6 @@ public static boolean isEnabled(Setting<Boolean> featureFlag) { false, Property.NodeScope ); + + public static final Setting<Boolean> DOC_ID_FUZZY_SET_SETTING = Setting.boolSetting(DOC_ID_FUZZY_SET, false, Property.NodeScope); } diff --git a/server/src/main/java/org/opensearch/index/IndexSettings.java b/server/src/main/java/org/opensearch/index/IndexSettings.java index 00e765d73f77f..43a3a49418fbf 100644 --- a/server/src/main/java/org/opensearch/index/IndexSettings.java +++ b/server/src/main/java/org/opensearch/index/IndexSettings.java @@ -65,7 +65,9 @@ import java.util.function.UnaryOperator; import static org.opensearch.Version.V_2_7_0; +import static org.opensearch.common.util.FeatureFlags.DOC_ID_FUZZY_SET_SETTING; import static org.opensearch.common.util.FeatureFlags.SEARCHABLE_SNAPSHOT_EXTENDED_COMPATIBILITY; +import static org.opensearch.index.codec.fuzzy.FuzzySetParameters.DEFAULT_FALSE_POSITIVE_PROBABILITY; import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING; import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING; import static org.opensearch.index.mapper.MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING; @@ -658,6 +660,22 @@ public static IndexMergePolicy fromString(String text) { Property.Dynamic ); + public static final Setting<Boolean> INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING = Setting.boolSetting( + "index.optimize_doc_id_lookup.fuzzy_set.enabled", + false, + Property.IndexScope, + Property.Dynamic + ); + + public static final Setting<Double> INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING = Setting.doubleSetting( + "index.optimize_doc_id_lookup.fuzzy_set.false_positive_probability", + DEFAULT_FALSE_POSITIVE_PROBABILITY, + 0.01, + 0.50, + Property.IndexScope, + Property.Dynamic + ); + public static final TimeValue DEFAULT_REMOTE_TRANSLOG_BUFFER_INTERVAL = new TimeValue(650, TimeUnit.MILLISECONDS); public static final TimeValue MINIMUM_REMOTE_TRANSLOG_BUFFER_INTERVAL = TimeValue.ZERO; public static final Setting<TimeValue> INDEX_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING = Setting.timeSetting( @@ -787,6 +805,16 @@ private void setRetentionLeaseMillis(final TimeValue retentionLease) { */ private volatile UnaryOperator<MergePolicy> mergeOnFlushPolicy; + /** + * Is fuzzy set enabled for doc id + */ + private volatile boolean enableFuzzySetForDocId; + + /** + * False positive probability to use while creating fuzzy set. + */ + private volatile double docIdFuzzySetFalsePositiveProbability; + /** * Returns the default search fields for this index. */ @@ -926,6 +954,13 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti * Now this sortField (IndexSort) is stored in SegmentInfo and we need to maintain backward compatibility for them. 
*/ widenIndexSortType = IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).before(V_2_7_0); + + boolean isOptimizeDocIdLookupUsingFuzzySetFeatureEnabled = FeatureFlags.isEnabled(DOC_ID_FUZZY_SET_SETTING); + if (isOptimizeDocIdLookupUsingFuzzySetFeatureEnabled) { + enableFuzzySetForDocId = scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING); + docIdFuzzySetFalsePositiveProbability = scopedSettings.get(INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING); + } + scopedSettings.addSettingsUpdateConsumer( TieredMergePolicyProvider.INDEX_COMPOUND_FORMAT_SETTING, tieredMergePolicyProvider::setNoCFSRatio @@ -1032,6 +1067,11 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti this::setRemoteTranslogUploadBufferInterval ); scopedSettings.addSettingsUpdateConsumer(INDEX_REMOTE_TRANSLOG_KEEP_EXTRA_GEN_SETTING, this::setRemoteTranslogKeepExtraGen); + scopedSettings.addSettingsUpdateConsumer(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING, this::setEnableFuzzySetForDocId); + scopedSettings.addSettingsUpdateConsumer( + INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING, + this::setDocIdFuzzySetFalsePositiveProbability + ); } private void setSearchIdleAfter(TimeValue searchIdleAfter) { @@ -1801,4 +1841,36 @@ public void setDefaultSearchPipeline(String defaultSearchPipeline) { public boolean shouldWidenIndexSortType() { return this.widenIndexSortType; } + + public boolean isEnableFuzzySetForDocId() { + return enableFuzzySetForDocId; + } + + public void setEnableFuzzySetForDocId(boolean enableFuzzySetForDocId) { + verifyFeatureToSetDocIdFuzzySetSetting(enabled -> this.enableFuzzySetForDocId = enabled, enableFuzzySetForDocId); + } + + public double getDocIdFuzzySetFalsePositiveProbability() { + return docIdFuzzySetFalsePositiveProbability; + } + + public void setDocIdFuzzySetFalsePositiveProbability(double docIdFuzzySetFalsePositiveProbability) { + verifyFeatureToSetDocIdFuzzySetSetting( + fpp -> this.docIdFuzzySetFalsePositiveProbability = fpp, + docIdFuzzySetFalsePositiveProbability + ); + } + + private static <T> void verifyFeatureToSetDocIdFuzzySetSetting(Consumer<T> settingUpdater, T val) { + if (FeatureFlags.isEnabled(DOC_ID_FUZZY_SET_SETTING)) { + settingUpdater.accept(val); + } else { + throw new IllegalArgumentException( + "Fuzzy set for optimizing doc id lookup " + + "cannot be enabled with feature flag [" + + FeatureFlags.DOC_ID_FUZZY_SET + + "] set to false" + ); + } + } } diff --git a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java index dc28ad2d6dc07..1ad17f121560c 100644 --- a/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/server/src/main/java/org/opensearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -39,10 +39,16 @@ import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.opensearch.common.lucene.Lucene; +import org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat; +import org.opensearch.index.codec.fuzzy.FuzzySetFactory; +import org.opensearch.index.codec.fuzzy.FuzzySetParameters; import org.opensearch.index.mapper.CompletionFieldMapper; +import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; +import java.util.Map; + /** * {@link PerFieldMappingPostingFormatCodec This postings format} 
is the default * {@link PostingsFormat} for OpenSearch. It utilizes the @@ -57,6 +63,8 @@ public class PerFieldMappingPostingFormatCodec extends Lucene99Codec { private final Logger logger; private final MapperService mapperService; private final DocValuesFormat dvFormat = new Lucene90DocValuesFormat(); + private final FuzzySetFactory fuzzySetFactory; + private PostingsFormat docIdPostingsFormat; static { assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMappingPostingFormatCodec.class) @@ -67,6 +75,12 @@ public PerFieldMappingPostingFormatCodec(Mode compressionMode, MapperService map super(compressionMode); this.mapperService = mapperService; this.logger = logger; + fuzzySetFactory = new FuzzySetFactory( + Map.of( + IdFieldMapper.NAME, + new FuzzySetParameters(() -> mapperService.getIndexSettings().getDocIdFuzzySetFalsePositiveProbability()) + ) + ); } @Override @@ -76,6 +90,11 @@ public PostingsFormat getPostingsFormatForField(String field) { logger.warn("no index mapper found for field: [{}] returning default postings format", field); } else if (fieldType instanceof CompletionFieldMapper.CompletionFieldType) { return CompletionFieldMapper.CompletionFieldType.postingsFormat(); + } else if (IdFieldMapper.NAME.equals(field) && mapperService.getIndexSettings().isEnableFuzzySetForDocId()) { + if (docIdPostingsFormat == null) { + docIdPostingsFormat = new FuzzyFilterPostingsFormat(super.getPostingsFormatForField(field), fuzzySetFactory); + } + return docIdPostingsFormat; } return super.getPostingsFormatForField(field); } diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/AbstractFuzzySet.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/AbstractFuzzySet.java new file mode 100644 index 0000000000000..09976297361fa --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/AbstractFuzzySet.java @@ -0,0 +1,61 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.CheckedSupplier; +import org.opensearch.common.hash.T1ha1; + +import java.io.IOException; +import java.util.Iterator; + +/** + * Encapsulates common behaviour implementation for a fuzzy set. + */ +public abstract class AbstractFuzzySet implements FuzzySet { + + /** + * Add an item to this fuzzy set. + * @param value The value to be added + */ + protected abstract void add(BytesRef value); + + /** + * Add all items to the underlying set. + * Implementations can choose to perform this using an optimized strategy based on the type of set. + * @param valuesIteratorProvider Supplier for an iterator over All values which should be added to the set. 
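+     * @throws IOException if the values iterator could not be obtained from the supplier.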
+ */ + protected void addAll(CheckedSupplier<Iterator<BytesRef>, IOException> valuesIteratorProvider) throws IOException { + Iterator<BytesRef> values = valuesIteratorProvider.get(); + while (values.hasNext()) { + add(values.next()); + } + } + + public Result contains(BytesRef val) { + return containsHash(generateKey(val)); + } + + protected abstract Result containsHash(long hash); + + protected long generateKey(BytesRef value) { + return T1ha1.hash(value.bytes, value.offset, value.length, 0L); + } + + protected void assertAllElementsExist(CheckedSupplier<Iterator<BytesRef>, IOException> iteratorProvider) throws IOException { + Iterator<BytesRef> iter = iteratorProvider.get(); + int cnt = 0; + while (iter.hasNext()) { + BytesRef item = iter.next(); + assert contains(item) == Result.MAYBE + : "Expected Filter to return positive response for elements added to it. Elements matched: " + cnt; + cnt++; + } + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/BloomFilter.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/BloomFilter.java new file mode 100644 index 0000000000000..b8a8352183ca8 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/BloomFilter.java @@ -0,0 +1,150 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Based on code from the Apache Lucene project (https://github.com/apache/lucene) under the Apache License, version 2.0. + * Copyright 2001-2022 The Apache Software Foundation + * Modifications (C) OpenSearch Contributors. All Rights Reserved. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.RamUsageEstimator; +import org.opensearch.common.CheckedSupplier; +import org.opensearch.common.util.io.IOUtils; +import org.opensearch.core.Assertions; + +import java.io.IOException; +import java.util.Iterator; + +/** + * The code is based on Lucene's implementation of Bloom Filter. + * It represents a subset of the Lucene implementation needed for OpenSearch use cases. + * Since the Lucene implementation is marked experimental, + * this aims to ensure we can provide a bwc implementation during upgrades. + */ +public class BloomFilter extends AbstractFuzzySet { + + private static final Logger logger = LogManager.getLogger(BloomFilter.class); + + // The sizes of BitSet used are all numbers that, when expressed in binary form, + // are all ones. This is to enable fast downsizing from one bitset to another + // by simply ANDing each set index in one bitset with the size of the target bitset + // - this provides a fast modulo of the number. Values previously accumulated in + // a large bitset and then mapped to a smaller set can be looked up using a single + // AND operation of the query term's hash rather than needing to perform a 2-step + // translation of the query term that mirrors the stored content's reprojections. 
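+    // For example, usableBitSetSizes[0] is (1 << 6) - 1 = 63 (binary 111111), so a hash can be folded into
+    // that set with (hash & 63) instead of a modulo, and entries from any larger all-ones size can be
+    // downsized with the same single AND.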
+ static final int[] usableBitSetSizes; + + static { + usableBitSetSizes = new int[26]; + for (int i = 0; i < usableBitSetSizes.length; i++) { + usableBitSetSizes[i] = (1 << (i + 6)) - 1; + } + } + + private final LongArrayBackedBitSet bitset; + private final int setSize; + private final int hashCount; + + BloomFilter(long maxDocs, double maxFpp, CheckedSupplier<Iterator<BytesRef>, IOException> fieldIteratorProvider) throws IOException { + int setSize = (int) Math.ceil((maxDocs * Math.log(maxFpp)) / Math.log(1 / Math.pow(2, Math.log(2)))); + setSize = getNearestSetSize(setSize < Integer.MAX_VALUE / 2 ? 2 * setSize : Integer.MAX_VALUE); + int optimalK = (int) Math.round(((double) setSize / maxDocs) * Math.log(2)); + this.bitset = new LongArrayBackedBitSet(setSize); + this.setSize = setSize; + this.hashCount = optimalK; + addAll(fieldIteratorProvider); + if (Assertions.ENABLED) { + assertAllElementsExist(fieldIteratorProvider); + } + logger.debug("Bloom filter created with fpp: {}, setSize: {}, hashCount: {}", maxFpp, setSize, hashCount); + } + + BloomFilter(IndexInput in) throws IOException { + hashCount = in.readInt(); + setSize = in.readInt(); + this.bitset = new LongArrayBackedBitSet(in); + } + + @Override + public void writeTo(DataOutput out) throws IOException { + out.writeInt(hashCount); + out.writeInt(setSize); + bitset.writeTo(out); + } + + private static int getNearestSetSize(int maxNumberOfBits) { + assert maxNumberOfBits > 0 : "Provided size estimate for bloom filter is illegal (<=0) : " + maxNumberOfBits; + int result = usableBitSetSizes[0]; + for (int i = 0; i < usableBitSetSizes.length; i++) { + if (usableBitSetSizes[i] <= maxNumberOfBits) { + result = usableBitSetSizes[i]; + } + } + return result; + } + + @Override + public SetType setType() { + return SetType.BLOOM_FILTER_V1; + } + + @Override + public Result containsHash(long hash) { + int msb = (int) (hash >>> Integer.SIZE); + int lsb = (int) hash; + for (int i = 0; i < hashCount; i++) { + int bloomPos = (lsb + i * msb); + if (!mayContainValue(bloomPos)) { + return Result.NO; + } + } + return Result.MAYBE; + } + + protected void add(BytesRef value) { + long hash = generateKey(value); + int msb = (int) (hash >>> Integer.SIZE); + int lsb = (int) hash; + for (int i = 0; i < hashCount; i++) { + // Bitmasking using bloomSize is effectively a modulo operation since set sizes are always power of 2 + int bloomPos = (lsb + i * msb) & setSize; + bitset.set(bloomPos); + } + } + + @Override + public boolean isSaturated() { + long numBitsSet = bitset.cardinality(); + // Don't bother saving bitsets if >90% of bits are set - we don't want to + // throw any more memory at this problem. 
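+        // Beyond that point a lookup for an absent term still returns MAYBE with probability of roughly
+        // 0.9^hashCount, so the filter can rarely rule anything out and is not worth persisting.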
+ return (float) numBitsSet / (float) setSize > 0.9f; + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.sizeOf(bitset.ramBytesUsed()); + } + + private boolean mayContainValue(int aHash) { + // Bloom sizes are always base 2 and so can be ANDed for a fast modulo + int pos = aHash & setSize; + return bitset.get(pos); + } + + @Override + public void close() throws IOException { + IOUtils.close(bitset); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormat.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormat.java new file mode 100644 index 0000000000000..01f8054fc91be --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormat.java @@ -0,0 +1,492 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Based on code from the Apache Lucene project (https://github.com/apache/lucene) under the Apache License, version 2.0. + * Copyright 2001-2022 The Apache Software Foundation + * Modifications (C) OpenSearch Contributors. All Rights Reserved. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.FieldsConsumer; +import org.apache.lucene.codecs.FieldsProducer; +import org.apache.lucene.codecs.NormsProducer; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.index.BaseTermsEnum; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.Fields; +import org.apache.lucene.index.ImpactsEnum; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.opensearch.common.util.io.IOUtils; + +import java.io.Closeable; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +/** + * Based on Lucene's BloomFilterPostingsFormat. + * Discussion with Lucene community based on which the decision to have this in OpenSearch code was taken + * is captured here: https://github.com/apache/lucene/issues/12986 + * + * The class deals with persisting the bloom filter through the postings format, + * and reading the field via a bloom filter fronted terms enum (to reduce disk seeks in case of absence of requested values) + * The class should be handled during lucene upgrades. There are bwc tests present to verify the format continues to work after upgrade. + */ + +public final class FuzzyFilterPostingsFormat extends PostingsFormat { + + private static final Logger logger = LogManager.getLogger(FuzzyFilterPostingsFormat.class); + + /** + * This name is stored in headers. If changing the implementation for the format, this name/version should be updated + * so that reads can work as expected. 
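+     * On read, {@code CodecUtil.checkIndexHeader} validates this codec name and the supported version range
+     * against the header written here.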
+ */ + public static final String FUZZY_FILTER_CODEC_NAME = "FuzzyFilterCodec99"; + + public static final int VERSION_START = 0; + public static final int VERSION_CURRENT = VERSION_START; + + /** Extension of Fuzzy Filters file */ + public static final String FUZZY_FILTER_FILE_EXTENSION = "fzd"; + + private final PostingsFormat delegatePostingsFormat; + private final FuzzySetFactory fuzzySetFactory; + + public FuzzyFilterPostingsFormat(PostingsFormat delegatePostingsFormat, FuzzySetFactory fuzzySetFactory) { + super(FUZZY_FILTER_CODEC_NAME); + this.delegatePostingsFormat = delegatePostingsFormat; + this.fuzzySetFactory = fuzzySetFactory; + } + + // Needed for SPI + public FuzzyFilterPostingsFormat() { + this(null, null); + } + + @Override + public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException { + if (delegatePostingsFormat == null) { + throw new UnsupportedOperationException( + "Error - " + getClass().getName() + " has been constructed without a choice of PostingsFormat" + ); + } + FieldsConsumer fieldsConsumer = delegatePostingsFormat.fieldsConsumer(state); + return new FuzzyFilteredFieldsConsumer(fieldsConsumer, state); + } + + @Override + public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException { + return new FuzzyFilteredFieldsProducer(state); + } + + static class FuzzyFilteredFieldsProducer extends FieldsProducer { + private FieldsProducer delegateFieldsProducer; + HashMap<String, FuzzySet> fuzzySetsByFieldName = new HashMap<>(); + private List<Closeable> closeables = new ArrayList<>(); + + public FuzzyFilteredFieldsProducer(SegmentReadState state) throws IOException { + String fuzzyFilterFileName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + FUZZY_FILTER_FILE_EXTENSION + ); + IndexInput filterIn = null; + boolean success = false; + try { + // Using IndexInput directly instead of ChecksumIndexInput since we want to support RandomAccessInput + filterIn = state.directory.openInput(fuzzyFilterFileName, state.context); + + CodecUtil.checkIndexHeader( + filterIn, + FUZZY_FILTER_CODEC_NAME, + VERSION_START, + VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + // Load the delegate postings format + PostingsFormat delegatePostingsFormat = PostingsFormat.forName(filterIn.readString()); + this.delegateFieldsProducer = delegatePostingsFormat.fieldsProducer(state); + int numFilters = filterIn.readInt(); + for (int i = 0; i < numFilters; i++) { + int fieldNum = filterIn.readInt(); + FuzzySet set = FuzzySetFactory.deserializeFuzzySet(filterIn); + closeables.add(set); + FieldInfo fieldInfo = state.fieldInfos.fieldInfo(fieldNum); + fuzzySetsByFieldName.put(fieldInfo.name, set); + } + CodecUtil.retrieveChecksum(filterIn); + + // Can we disable it if we foresee performance issues? + CodecUtil.checksumEntireFile(filterIn); + success = true; + closeables.add(filterIn); + } finally { + if (!success) { + IOUtils.closeWhileHandlingException(filterIn, delegateFieldsProducer); + } + } + } + + @Override + public Iterator<String> iterator() { + return delegateFieldsProducer.iterator(); + } + + @Override + public void close() throws IOException { + // Why closing here? 
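+            // The closeables list holds each deserialized FuzzySet plus the filter IndexInput backing them,
+            // so they are released here before the delegate producer is closed.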
+ IOUtils.closeWhileHandlingException(closeables); + delegateFieldsProducer.close(); + } + + @Override + public Terms terms(String field) throws IOException { + FuzzySet filter = fuzzySetsByFieldName.get(field); + if (filter == null) { + return delegateFieldsProducer.terms(field); + } else { + Terms result = delegateFieldsProducer.terms(field); + if (result == null) { + return null; + } + return new FuzzyFilteredTerms(result, filter); + } + } + + @Override + public int size() { + return delegateFieldsProducer.size(); + } + + static class FuzzyFilteredTerms extends Terms { + private Terms delegateTerms; + private FuzzySet filter; + + public FuzzyFilteredTerms(Terms terms, FuzzySet filter) { + this.delegateTerms = terms; + this.filter = filter; + } + + @Override + public TermsEnum intersect(CompiledAutomaton compiled, final BytesRef startTerm) throws IOException { + return delegateTerms.intersect(compiled, startTerm); + } + + @Override + public TermsEnum iterator() throws IOException { + return new FilterAppliedTermsEnum(delegateTerms, filter); + } + + @Override + public long size() throws IOException { + return delegateTerms.size(); + } + + @Override + public long getSumTotalTermFreq() throws IOException { + return delegateTerms.getSumTotalTermFreq(); + } + + @Override + public long getSumDocFreq() throws IOException { + return delegateTerms.getSumDocFreq(); + } + + @Override + public int getDocCount() throws IOException { + return delegateTerms.getDocCount(); + } + + @Override + public boolean hasFreqs() { + return delegateTerms.hasFreqs(); + } + + @Override + public boolean hasOffsets() { + return delegateTerms.hasOffsets(); + } + + @Override + public boolean hasPositions() { + return delegateTerms.hasPositions(); + } + + @Override + public boolean hasPayloads() { + return delegateTerms.hasPayloads(); + } + + @Override + public BytesRef getMin() throws IOException { + return delegateTerms.getMin(); + } + + @Override + public BytesRef getMax() throws IOException { + return delegateTerms.getMax(); + } + } + + static final class FilterAppliedTermsEnum extends BaseTermsEnum { + + private Terms delegateTerms; + private TermsEnum delegateTermsEnum; + private final FuzzySet filter; + + public FilterAppliedTermsEnum(Terms delegateTerms, FuzzySet filter) throws IOException { + this.delegateTerms = delegateTerms; + this.filter = filter; + } + + void reset(Terms delegateTerms) throws IOException { + this.delegateTerms = delegateTerms; + this.delegateTermsEnum = null; + } + + private TermsEnum delegate() throws IOException { + if (delegateTermsEnum == null) { + /* pull the iterator only if we really need it - + * this can be a relativly heavy operation depending on the + * delegate postings format and the underlying directory + * (clone IndexInput) */ + delegateTermsEnum = delegateTerms.iterator(); + } + return delegateTermsEnum; + } + + @Override + public BytesRef next() throws IOException { + return delegate().next(); + } + + @Override + public boolean seekExact(BytesRef text) throws IOException { + // The magical fail-fast speed up that is the entire point of all of + // this code - save a disk seek if there is a match on an in-memory + // structure + // that may occasionally give a false positive but guaranteed no false + // negatives + if (filter.contains(text) == FuzzySet.Result.NO) { + return false; + } + return delegate().seekExact(text); + } + + @Override + public SeekStatus seekCeil(BytesRef text) throws IOException { + return delegate().seekCeil(text); + } + + @Override + public void 
seekExact(long ord) throws IOException { + delegate().seekExact(ord); + } + + @Override + public BytesRef term() throws IOException { + return delegate().term(); + } + + @Override + public long ord() throws IOException { + return delegate().ord(); + } + + @Override + public int docFreq() throws IOException { + return delegate().docFreq(); + } + + @Override + public long totalTermFreq() throws IOException { + return delegate().totalTermFreq(); + } + + @Override + public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { + return delegate().postings(reuse, flags); + } + + @Override + public ImpactsEnum impacts(int flags) throws IOException { + return delegate().impacts(flags); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(filter=" + filter.toString() + ")"; + } + } + + @Override + public void checkIntegrity() throws IOException { + delegateFieldsProducer.checkIntegrity(); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(fields=" + fuzzySetsByFieldName.size() + ",delegate=" + delegateFieldsProducer + ")"; + } + } + + class FuzzyFilteredFieldsConsumer extends FieldsConsumer { + private FieldsConsumer delegateFieldsConsumer; + private Map<FieldInfo, FuzzySet> fuzzySets = new HashMap<>(); + private SegmentWriteState state; + private List<Closeable> closeables = new ArrayList<>(); + + public FuzzyFilteredFieldsConsumer(FieldsConsumer fieldsConsumer, SegmentWriteState state) { + this.delegateFieldsConsumer = fieldsConsumer; + this.state = state; + } + + @Override + public void write(Fields fields, NormsProducer norms) throws IOException { + + // Delegate must write first: it may have opened files + // on creating the class + // (e.g. Lucene41PostingsConsumer), and write() will + // close them; alternatively, if we delayed pulling + // the fields consumer until here, we could do it + // afterwards: + delegateFieldsConsumer.write(fields, norms); + + for (String field : fields) { + Terms terms = fields.terms(field); + if (terms == null) { + continue; + } + FieldInfo fieldInfo = state.fieldInfos.fieldInfo(field); + FuzzySet fuzzySet = fuzzySetFactory.createFuzzySet(state.segmentInfo.maxDoc(), fieldInfo.name, () -> iterator(terms)); + if (fuzzySet == null) { + break; + } + assert fuzzySets.containsKey(fieldInfo) == false; + closeables.add(fuzzySet); + fuzzySets.put(fieldInfo, fuzzySet); + } + } + + private Iterator<BytesRef> iterator(Terms terms) throws IOException { + TermsEnum termIterator = terms.iterator(); + return new Iterator<>() { + + private BytesRef currentTerm; + private PostingsEnum postingsEnum; + + @Override + public boolean hasNext() { + try { + do { + currentTerm = termIterator.next(); + if (currentTerm == null) { + return false; + } + postingsEnum = termIterator.postings(postingsEnum, 0); + if (postingsEnum.nextDoc() != PostingsEnum.NO_MORE_DOCS) { + return true; + } + } while (true); + } catch (IOException ex) { + throw new IllegalStateException("Cannot read terms: " + termIterator.attributes()); + } + } + + @Override + public BytesRef next() { + return currentTerm; + } + }; + } + + private boolean closed; + + @Override + public void close() throws IOException { + if (closed) { + return; + } + closed = true; + delegateFieldsConsumer.close(); + + // Now we are done accumulating values for these fields + List<Map.Entry<FieldInfo, FuzzySet>> nonSaturatedSets = new ArrayList<>(); + + for (Map.Entry<FieldInfo, FuzzySet> entry : fuzzySets.entrySet()) { + FuzzySet fuzzySet = 
entry.getValue();
+                if (!fuzzySet.isSaturated()) {
+                    nonSaturatedSets.add(entry);
+                }
+            }
+            String fuzzyFilterFileName = IndexFileNames.segmentFileName(
+                state.segmentInfo.name,
+                state.segmentSuffix,
+                FUZZY_FILTER_FILE_EXTENSION
+            );
+            try (IndexOutput fuzzyFilterFileOutput = state.directory.createOutput(fuzzyFilterFileName, state.context)) {
+                logger.trace(
+                    "Writing fuzzy filter postings with version: {} for segment: {}",
+                    VERSION_CURRENT,
+                    state.segmentInfo.toString()
+                );
+                CodecUtil.writeIndexHeader(
+                    fuzzyFilterFileOutput,
+                    FUZZY_FILTER_CODEC_NAME,
+                    VERSION_CURRENT,
+                    state.segmentInfo.getId(),
+                    state.segmentSuffix
+                );
+
+                // remember the name of the postings format we will delegate to
+                fuzzyFilterFileOutput.writeString(delegatePostingsFormat.getName());
+
+                // First field in the output file is the number of fields+sets saved
+                fuzzyFilterFileOutput.writeInt(nonSaturatedSets.size());
+                for (Map.Entry<FieldInfo, FuzzySet> entry : nonSaturatedSets) {
+                    FieldInfo fieldInfo = entry.getKey();
+                    FuzzySet fuzzySet = entry.getValue();
+                    saveAppropriatelySizedFuzzySet(fuzzyFilterFileOutput, fuzzySet, fieldInfo);
+                }
+                CodecUtil.writeFooter(fuzzyFilterFileOutput);
+            }
+            // We are done with large bitsets so no need to keep them hanging around
+            fuzzySets.clear();
+            IOUtils.closeWhileHandlingException(closeables);
+        }
+
+        private void saveAppropriatelySizedFuzzySet(IndexOutput fileOutput, FuzzySet fuzzySet, FieldInfo fieldInfo) throws IOException {
+            fileOutput.writeInt(fieldInfo.number);
+            fileOutput.writeString(fuzzySet.setType().getSetName());
+            fuzzySet.writeTo(fileOutput);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "FuzzyFilterPostingsFormat(" + delegatePostingsFormat + ")";
+    }
+}
diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySet.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySet.java
new file mode 100644
index 0000000000000..df443ffbca33d
--- /dev/null
+++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySet.java
@@ -0,0 +1,98 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.index.codec.fuzzy;
+
+import org.apache.lucene.store.DataOutput;
+import org.apache.lucene.store.IndexInput;
+import org.apache.lucene.util.Accountable;
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.common.CheckedFunction;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Fuzzy Filter interface
+ */
+public interface FuzzySet extends Accountable, Closeable {
+
+    /**
+     * Name used for a codec to be aware of what fuzzy set has been used.
+     */
+    SetType setType();
+
+    /**
+     * @param value the item whose membership needs to be checked.
+     */
+    Result contains(BytesRef value);
+
+    boolean isSaturated();
+
+    void writeTo(DataOutput out) throws IOException;
+
+    /**
+     * Enum to represent result of membership check on a fuzzy set.
+     */
+    enum Result {
+        /**
+         * A definite no for the set membership of an item.
+         */
+        NO,
+
+        /**
+         * Fuzzy sets cannot guarantee that a given item is present in the set or not due to the data being stored in
+         * a lossy format (e.g. fingerprint, hash).
+         * Hence, we return a response denoting that the item may be present.
+         */
+        MAYBE
+    }
+
+    /**
+     * Enum to declare supported properties and mappings for a fuzzy set implementation.
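+     * Each constant records the set name persisted in the postings file and the deserializer used to read the set back at open time.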
+ */ + enum SetType { + BLOOM_FILTER_V1("bloom_filter_v1", BloomFilter::new, List.of("bloom_filter")); + + /** + * Name persisted in postings file. This will be used when reading to determine the bloom filter implementation. + */ + private final String setName; + + /** + * Interface for reading the actual fuzzy set implementation into java object. + */ + private final CheckedFunction<IndexInput, ? extends FuzzySet, IOException> deserializer; + + SetType(String setName, CheckedFunction<IndexInput, ? extends FuzzySet, IOException> deserializer, List<String> aliases) { + if (aliases.size() < 1) { + throw new IllegalArgumentException("Alias list is empty. Could not create Set Type: " + setName); + } + this.setName = setName; + this.deserializer = deserializer; + } + + public String getSetName() { + return setName; + } + + public CheckedFunction<IndexInput, ? extends FuzzySet, IOException> getDeserializer() { + return deserializer; + } + + public static SetType from(String name) { + for (SetType type : SetType.values()) { + if (type.setName.equals(name)) { + return type; + } + } + throw new IllegalArgumentException("There is no implementation for fuzzy set: " + name); + } + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetFactory.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetFactory.java new file mode 100644 index 0000000000000..5d1fd03f099d4 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetFactory.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.CheckedSupplier; + +import java.io.IOException; +import java.util.Iterator; +import java.util.Map; + +/** + * Factory class to create fuzzy set. + * Supports bloom filters for now. More sets can be added as required. 
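+ * A rough usage sketch (the field name and false positive probability here are illustrative only):
+ * <pre>
+ * FuzzySetFactory factory = new FuzzySetFactory(Map.of("_id", new FuzzySetParameters(() -> 0.2047d)));
+ * FuzzySet set = factory.createFuzzySet(maxDocs, "_id", () -> idIterator);
+ * </pre>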
+ */ +public class FuzzySetFactory { + + private final Map<String, FuzzySetParameters> setTypeForField; + + public FuzzySetFactory(Map<String, FuzzySetParameters> setTypeForField) { + this.setTypeForField = setTypeForField; + } + + public FuzzySet createFuzzySet(int maxDocs, String fieldName, CheckedSupplier<Iterator<BytesRef>, IOException> iteratorProvider) + throws IOException { + FuzzySetParameters params = setTypeForField.get(fieldName); + if (params == null) { + throw new IllegalArgumentException("No fuzzy set defined for field: " + fieldName); + } + switch (params.getSetType()) { + case BLOOM_FILTER_V1: + return new BloomFilter(maxDocs, params.getFalsePositiveProbability(), iteratorProvider); + default: + throw new IllegalArgumentException("No Implementation for set type: " + params.getSetType()); + } + } + + public static FuzzySet deserializeFuzzySet(IndexInput in) throws IOException { + FuzzySet.SetType setType = FuzzySet.SetType.from(in.readString()); + return setType.getDeserializer().apply(in); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetParameters.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetParameters.java new file mode 100644 index 0000000000000..7bb96e7c34f0b --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/FuzzySetParameters.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import java.util.function.Supplier; + +/** + * Wrapper for params to create a fuzzy set. + */ +public class FuzzySetParameters { + private final Supplier<Double> falsePositiveProbabilityProvider; + private final FuzzySet.SetType setType; + + public static final double DEFAULT_FALSE_POSITIVE_PROBABILITY = 0.2047d; + + public FuzzySetParameters(Supplier<Double> falsePositiveProbabilityProvider) { + this.falsePositiveProbabilityProvider = falsePositiveProbabilityProvider; + this.setType = FuzzySet.SetType.BLOOM_FILTER_V1; + } + + public double getFalsePositiveProbability() { + return falsePositiveProbabilityProvider.get(); + } + + public FuzzySet.SetType getSetType() { + return setType; + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/IndexInputImmutableLongArray.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/IndexInputImmutableLongArray.java new file mode 100644 index 0000000000000..08d6059c1e82e --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/IndexInputImmutableLongArray.java @@ -0,0 +1,70 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.RamUsageEstimator; +import org.opensearch.OpenSearchException; +import org.opensearch.common.util.LongArray; + +import java.io.IOException; + +/** + * A Long array backed by RandomAccessInput. + * This implementation supports read operations only. 
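+ * Mutating operations ({@code set}, {@code increment}, {@code fill}) throw {@link UnsupportedOperationException}.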
+ */ +class IndexInputImmutableLongArray implements LongArray { + + private final RandomAccessInput input; + private final long size; + + IndexInputImmutableLongArray(long size, RandomAccessInput input) { + this.size = size; + this.input = input; + } + + @Override + public void close() {} + + @Override + public long size() { + return size; + } + + @Override + public synchronized long get(long index) { + try { + // Multiplying by 8 since each long is 8 bytes, and we need to get the long value at (index * 8) in the + // RandomAccessInput being accessed. + return input.readLong(index << 3); + } catch (IOException ex) { + throw new OpenSearchException(ex); + } + } + + @Override + public long set(long index, long value) { + throw new UnsupportedOperationException(); + } + + @Override + public long increment(long index, long inc) { + throw new UnsupportedOperationException(); + } + + @Override + public void fill(long fromIndex, long toIndex, long value) { + throw new UnsupportedOperationException(); + } + + @Override + public long ramBytesUsed() { + return RamUsageEstimator.shallowSizeOfInstance(IndexInputImmutableLongArray.class); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/LongArrayBackedBitSet.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/LongArrayBackedBitSet.java new file mode 100644 index 0000000000000..bd4936aeec366 --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/LongArrayBackedBitSet.java @@ -0,0 +1,105 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.Accountable; +import org.opensearch.common.util.BigArrays; +import org.opensearch.common.util.LongArray; +import org.opensearch.common.util.io.IOUtils; + +import java.io.Closeable; +import java.io.IOException; + +/** + * A bitset backed by a long-indexed array. + */ +class LongArrayBackedBitSet implements Accountable, Closeable { + + private long underlyingArrayLength = 0L; + private LongArray longArray; + + /** + * Constructor which uses an on heap array. This should be using during construction of the bitset. + * @param capacity The maximum capacity to provision for the bitset. + */ + LongArrayBackedBitSet(long capacity) { + // Since the bitset is backed by a long array, we only need 1 element for every 64 bits in the underlying array. + underlyingArrayLength = (capacity >> 6) + 1L; + this.longArray = BigArrays.NON_RECYCLING_INSTANCE.withCircuitBreaking().newLongArray(underlyingArrayLength); + } + + /** + * Constructor which uses Lucene's IndexInput to read the bitset into a read-only buffer. + * @param in IndexInput containing the serialized bitset. + * @throws IOException + */ + LongArrayBackedBitSet(IndexInput in) throws IOException { + underlyingArrayLength = in.readLong(); + // Multiplying by 8 since the length above is of the long array, so we will have + // 8 times the number of bytes in our stream. 
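+ // For example, a serialized array of 4 longs has underlyingArrayLength == 4 and
+ // occupies 4 << 3 == 32 bytes of the stream.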
+ long streamLength = underlyingArrayLength << 3; + this.longArray = new IndexInputImmutableLongArray(underlyingArrayLength, in.randomAccessSlice(in.getFilePointer(), streamLength)); + in.skipBytes(streamLength); + } + + public void writeTo(DataOutput out) throws IOException { + out.writeLong(underlyingArrayLength); + for (int idx = 0; idx < underlyingArrayLength; idx++) { + out.writeLong(longArray.get(idx)); + } + } + + /** + * This is an O(n) operation, and will iterate over all the elements in the underlying long array + * to determine cardinality of the set. + * @return number of set bits in the bitset. + */ + public long cardinality() { + long tot = 0; + for (int i = 0; i < underlyingArrayLength; ++i) { + tot += Long.bitCount(longArray.get(i)); + } + return tot; + } + + /** + * Retrieves whether the bit is set or not at the given index. + * @param index the index to look up for the bit + * @return true if bit is set, false otherwise + */ + public boolean get(long index) { + long i = index >> 6; // div 64 + long val = longArray.get(i); + long bitmask = 1L << index; + return (val & bitmask) != 0; + } + + /** + * Sets the bit at the given index. + * @param index the index to set the bit at. + */ + public void set(long index) { + long wordNum = index >> 6; // div 64 + long bitmask = 1L << index; + long val = longArray.get(wordNum); + longArray.set(wordNum, val | bitmask); + } + + @Override + public long ramBytesUsed() { + return 128L + longArray.ramBytesUsed(); + } + + @Override + public void close() throws IOException { + IOUtils.close(longArray); + } +} diff --git a/server/src/main/java/org/opensearch/index/codec/fuzzy/package-info.java b/server/src/main/java/org/opensearch/index/codec/fuzzy/package-info.java new file mode 100644 index 0000000000000..7aeac68cd192a --- /dev/null +++ b/server/src/main/java/org/opensearch/index/codec/fuzzy/package-info.java @@ -0,0 +1,10 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +/** classes responsible for handling all fuzzy codecs and operations */ +package org.opensearch.index.codec.fuzzy; diff --git a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java index d4a97f0267222..34aecfc62b8b2 100644 --- a/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java +++ b/server/src/main/java/org/opensearch/index/engine/SegmentsStats.java @@ -41,6 +41,7 @@ import org.opensearch.core.xcontent.ToXContentFragment; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.index.ReplicationStats; +import org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat; import org.opensearch.index.remote.RemoteSegmentStats; import java.io.IOException; @@ -95,7 +96,8 @@ public class SegmentsStats implements Writeable, ToXContentFragment { Map.entry("tvx", "Term Vector Index"), Map.entry("tvd", "Term Vector Documents"), Map.entry("tvf", "Term Vector Fields"), - Map.entry("liv", "Live Documents") + Map.entry("liv", "Live Documents"), + Map.entry(FuzzyFilterPostingsFormat.FUZZY_FILTER_FILE_EXTENSION, "Fuzzy Filter") ); public SegmentsStats() { diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat index 2c92f0ecd3f51..80b1d25064885 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat @@ -1 +1,2 @@ org.apache.lucene.search.suggest.document.Completion50PostingsFormat +org.opensearch.index.codec.fuzzy.FuzzyFilterPostingsFormat diff --git a/server/src/test/java/org/opensearch/index/codec/fuzzy/BloomFilterTests.java b/server/src/test/java/org/opensearch/index/codec/fuzzy/BloomFilterTests.java new file mode 100644 index 0000000000000..92669d5bc1d92 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/fuzzy/BloomFilterTests.java @@ -0,0 +1,82 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.store.ByteArrayDataOutput; +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.lucene.store.ByteArrayIndexInput; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.Iterator; +import java.util.List; + +public class BloomFilterTests extends OpenSearchTestCase { + + public void testBloomFilterSerializationDeserialization() throws IOException { + int elementCount = randomIntBetween(1, 100); + long maxDocs = elementCount * 10L; // Keeping this high so that it ensures some bits are not set. 
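+ // Round-trip check: build a filter over the generated ids, serialize it prefixed with its
+ // set type name, read it back through FuzzySetFactory, and verify both filters agree.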
+ BloomFilter filter = new BloomFilter(maxDocs, getFpp(), () -> idIterator(elementCount)); + byte[] buffer = new byte[(int) maxDocs * 5]; + ByteArrayDataOutput out = new ByteArrayDataOutput(buffer); + + // Write in the format readable through factory + out.writeString(filter.setType().getSetName()); + filter.writeTo(out); + + FuzzySet reconstructedFilter = FuzzySetFactory.deserializeFuzzySet(new ByteArrayIndexInput("filter", buffer)); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, reconstructedFilter.setType()); + + Iterator<BytesRef> idIterator = idIterator(elementCount); + while (idIterator.hasNext()) { + BytesRef element = idIterator.next(); + assertEquals(FuzzySet.Result.MAYBE, reconstructedFilter.contains(element)); + assertEquals(FuzzySet.Result.MAYBE, filter.contains(element)); + } + } + + public void testBloomFilterIsSaturated_returnsTrue() throws IOException { + BloomFilter bloomFilter = new BloomFilter(1L, getFpp(), () -> idIterator(1000)); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, bloomFilter.setType()); + assertEquals(true, bloomFilter.isSaturated()); + } + + public void testBloomFilterIsSaturated_returnsFalse() throws IOException { + int elementCount = randomIntBetween(1, 100); + BloomFilter bloomFilter = new BloomFilter(20000, getFpp(), () -> idIterator(elementCount)); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, bloomFilter.setType()); + assertEquals(false, bloomFilter.isSaturated()); + } + + public void testBloomFilterWithLargeCapacity() throws IOException { + long maxDocs = randomLongBetween(Integer.MAX_VALUE, 5L * Integer.MAX_VALUE); + BloomFilter bloomFilter = new BloomFilter(maxDocs, getFpp(), () -> List.of(new BytesRef("bar")).iterator()); + assertEquals(FuzzySet.SetType.BLOOM_FILTER_V1, bloomFilter.setType()); + } + + private double getFpp() { + return randomDoubleBetween(0.01, 0.50, true); + } + + private Iterator<BytesRef> idIterator(int count) { + return new Iterator<BytesRef>() { + int cnt = count; + + @Override + public boolean hasNext() { + return cnt-- > 0; + } + + @Override + public BytesRef next() { + return new BytesRef(Integer.toString(cnt)); + } + }; + } +} diff --git a/server/src/test/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormatTests.java b/server/src/test/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormatTests.java new file mode 100644 index 0000000000000..868c2175d0689 --- /dev/null +++ b/server/src/test/java/org/opensearch/index/codec/fuzzy/FuzzyFilterPostingsFormatTests.java @@ -0,0 +1,34 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.index.codec.fuzzy; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.BasePostingsFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; + +import java.util.TreeMap; + +public class FuzzyFilterPostingsFormatTests extends BasePostingsFormatTestCase { + + private TreeMap<String, FuzzySetParameters> params = new TreeMap<>() { + @Override + public FuzzySetParameters get(Object k) { + return new FuzzySetParameters(() -> FuzzySetParameters.DEFAULT_FALSE_POSITIVE_PROBABILITY); + } + }; + + private Codec fuzzyFilterCodec = TestUtil.alwaysPostingsFormat( + new FuzzyFilterPostingsFormat(TestUtil.getDefaultPostingsFormat(), new FuzzySetFactory(params)) + ); + + @Override + protected Codec getCodec() { + return fuzzyFilterCodec; + } +} diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 72d68ffb9449d..0f9ab3aa3d64f 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -197,6 +197,8 @@ import static org.opensearch.core.common.util.CollectionUtils.eagerPartition; import static org.opensearch.discovery.DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING; import static org.opensearch.discovery.SettingsBasedSeedHostsProvider.DISCOVERY_SEED_HOSTS_SETTING; +import static org.opensearch.index.IndexSettings.INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING; +import static org.opensearch.index.IndexSettings.INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.test.XContentTestUtils.convertToMap; @@ -630,6 +632,11 @@ public Settings indexSettings() { ); } + if (randomBoolean()) { + builder.put(INDEX_DOC_ID_FUZZY_SET_ENABLED_SETTING.getKey(), true); + builder.put(INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING.getKey(), randomDoubleBetween(0.01, 0.50, true)); + } + return builder.build(); } @@ -646,6 +653,9 @@ protected Settings featureFlagSettings() { } // Enabling Telemetry setting by default featureSettings.put(FeatureFlags.TELEMETRY_SETTING.getKey(), true); + + // Enabling fuzzy set for tests by default + featureSettings.put(FeatureFlags.DOC_ID_FUZZY_SET_SETTING.getKey(), true); return featureSettings.build(); } From 60b2ac4f42a389d5ebe085bd1b365ae745fc493f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 10:07:50 -0800 Subject: [PATCH 171/178] Bump org.gradle.test-retry from 1.5.4 to 1.5.8 (#12168) * Bump org.gradle.test-retry from 1.5.4 to 1.5.8 Bumps org.gradle.test-retry from 1.5.4 to 1.5.8. --- updated-dependencies: - dependency-name: org.gradle.test-retry dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> --- CHANGELOG.md | 1 + build.gradle | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 18d0e431a4b5c..0e5741f91928d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -187,6 +187,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `reactor-core` from 3.5.11 to 3.5.14 ([#12042](https://github.com/opensearch-project/OpenSearch/pull/12042)) - Bump `com.google.http-client:google-http-client-jackson2` from 1.43.3 to 1.44.1 ([#12059](https://github.com/opensearch-project/OpenSearch/pull/12059)) - Bump `peter-evans/create-issue-from-file` from 4 to 5 ([#12057](https://github.com/opensearch-project/OpenSearch/pull/12057)) +- Bump `org.gradle.test-retry` from 1.5.4 to 1.5.8 ([#12168](https://github.com/opensearch-project/OpenSearch/pull/12168)) ### Changed - Mute the query profile IT with concurrent execution ([#9840](https://github.com/opensearch-project/OpenSearch/pull/9840)) diff --git a/build.gradle b/build.gradle index 375ab91e99e94..6f9aa0ea9e439 100644 --- a/build.gradle +++ b/build.gradle @@ -55,7 +55,7 @@ plugins { id 'opensearch.docker-support' id 'opensearch.global-build-info' id "com.diffplug.spotless" version "6.25.0" apply false - id "org.gradle.test-retry" version "1.5.4" apply false + id "org.gradle.test-retry" version "1.5.8" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' } From c51818af03ab593b76d9e177753a123f0a15861e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 5 Feb 2024 16:17:22 -0500 Subject: [PATCH 172/178] Bump com.google.api:gax-httpjson from 2.39.0 to 2.42.0 in /plugins/repository-gcs (#12165) * Bump com.google.api:gax-httpjson in /plugins/repository-gcs Bumps com.google.api:gax-httpjson from 2.39.0 to 2.42.0. --- updated-dependencies: - dependency-name: com.google.api:gax-httpjson dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] <support@github.com> * Updating SHAs Signed-off-by: dependabot[bot] <support@github.com> * Update changelog Signed-off-by: dependabot[bot] <support@github.com> * Update missing classes Signed-off-by: Kunal Kotwani <kkotwani@amazon.com> --------- Signed-off-by: dependabot[bot] <support@github.com> Signed-off-by: Kunal Kotwani <kkotwani@amazon.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] <dependabot[bot]@users.noreply.github.com> Co-authored-by: Kunal Kotwani <kkotwani@amazon.com> --- CHANGELOG.md | 2 +- plugins/repository-gcs/build.gradle | 7 +++++-- .../repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 | 1 - .../repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 | 1 + 4 files changed, 7 insertions(+), 4 deletions(-) delete mode 100644 plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 0e5741f91928d..5f5ac4bfb1ffa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -180,7 +180,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `lycheeverse/lychee-action` from 1.8.0 to 1.9.1 ([#11795](https://github.com/opensearch-project/OpenSearch/pull/11795), [#11887](https://github.com/opensearch-project/OpenSearch/pull/11887)) - Bump `Lucene` from 9.8.0 to 9.9.2 ([#11421](https://github.com/opensearch-project/OpenSearch/pull/11421)), ([#12063](https://github.com/opensearch-project/OpenSearch/pull/12063)) - Bump `com.networknt:json-schema-validator` from 1.0.86 to 1.2.0 ([#11886](https://github.com/opensearch-project/OpenSearch/pull/11886), [#11963](https://github.com/opensearch-project/OpenSearch/pull/11963)) -- Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.39.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794)) +- Bump `com.google.api:gax-httpjson` from 0.103.1 to 2.42.0 ([#11794](https://github.com/opensearch-project/OpenSearch/pull/11794), [#12165](https://github.com/opensearch-project/OpenSearch/pull/12165)) - Bump `com.google.oauth-client:google-oauth-client` from 1.34.1 to 1.35.0 ([#11960](https://github.com/opensearch-project/OpenSearch/pull/11960)) - Bump `com.diffplug.spotless` from 6.23.2 to 6.25.0 ([#11962](https://github.com/opensearch-project/OpenSearch/pull/11962), [#12055](https://github.com/opensearch-project/OpenSearch/pull/12055)) - Bump `com.google.cloud:google-cloud-core` from 2.5.10 to 2.30.0 ([#11961](https://github.com/opensearch-project/OpenSearch/pull/11961)) diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 2ce67b1f25a80..b28f97677b0df 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -54,7 +54,7 @@ versions << [ dependencies { api 'com.google.api:api-common:1.8.1' api 'com.google.api:gax:2.35.0' - api 'com.google.api:gax-httpjson:2.39.0' + api 'com.google.api:gax-httpjson:2.42.0' api 'com.google.apis:google-api-services-storage:v1-rev20230617-2.0.0' @@ -206,7 +206,10 @@ thirdPartyAudit { // commons-logging provided dependencies 'javax.jms.Message', 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener' + 'javax.servlet.ServletContextListener', + // Bump for gax 2.42.0 + 'com.google.api.gax.rpc.EndpointContext', + 'com.google.api.gax.rpc.RequestMutator' ) } diff --git a/plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 
b/plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 deleted file mode 100644 index d0f3b8dfa6d25..0000000000000 --- a/plugins/repository-gcs/licenses/gax-httpjson-2.39.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ebff086f21c54c1eb02056f94255b4f1836a8efe \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 new file mode 100644 index 0000000000000..672506572ed4d --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-2.42.0.jar.sha1 @@ -0,0 +1 @@ +4db06bc31c2fb34b0490362e8666c20fdc1fb3f2 \ No newline at end of file From a4bc4af5773d0ba8bd1a9d09a03a034e7b833992 Mon Sep 17 00:00:00 2001 From: Gagan Juneja <gagandeepjuneja@gmail.com> Date: Tue, 6 Feb 2024 04:16:37 +0530 Subject: [PATCH 173/178] [Metric Framework] Adds support for Histogram metric (#12062) * [Metric Framework] Adds support for Histogram metric Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Adds test Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Addresses review comments Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Adds change log Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Fixed spotless Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Fixes javadoc Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Fixes javadoc Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Fixes test Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Removes explicit approach Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Removes explicit approach Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> * Addresses review comments Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> --------- Signed-off-by: Gagan Juneja <gjjuneja@amazon.com> Co-authored-by: Gagan Juneja <gjjuneja@amazon.com> --- CHANGELOG.md | 1 + .../metrics/DefaultMetricsRegistry.java | 5 +++ .../telemetry/metrics/Histogram.java | 35 ++++++++++++++++ .../telemetry/metrics/MetricsRegistry.java | 11 +++++ .../telemetry/metrics/noop/NoopHistogram.java | 38 ++++++++++++++++++ .../metrics/noop/NoopMetricsRegistry.java | 6 +++ .../metrics/DefaultMetricsRegistryTests.java | 11 +++++ .../TelemetryMetricsDisabledSanityIT.java | 4 ++ .../TelemetryMetricsEnabledSanityIT.java | 26 ++++++++++++ .../telemetry/metrics/OTelHistogram.java | 40 +++++++++++++++++++ .../metrics/OTelMetricsTelemetry.java | 18 +++++++++ .../tracing/OTelResourceProvider.java | 9 +++++ .../metrics/OTelMetricsTelemetryTests.java | 31 ++++++++++++++ .../test/telemetry/MockTelemetry.java | 7 ++++ 14 files changed, 242 insertions(+) create mode 100644 libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java create mode 100644 libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java create mode 100644 plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelHistogram.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 5f5ac4bfb1ffa..50a1a6ea99243 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -138,6 +138,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add support for Google Application Default Credentials in repository-gcs ([#8394](https://github.com/opensearch-project/OpenSearch/pull/8394)) - Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074)) - Enable Fuzzy codec for doc id fields using a bloom filter ([#11022](https://github.com/opensearch-project/OpenSearch/pull/11022)) +- [Metrics Framework] 
Adds support for Histogram metric ([#12062](https://github.com/opensearch-project/OpenSearch/pull/12062)) ### Dependencies - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822)) diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java index d57def9406b17..f38fdd6412d79 100644 --- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java +++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistry.java @@ -34,6 +34,11 @@ public Counter createUpDownCounter(String name, String description, String unit) return metricsTelemetry.createUpDownCounter(name, description, unit); } + @Override + public Histogram createHistogram(String name, String description, String unit) { + return metricsTelemetry.createHistogram(name, description, unit); + } + @Override public void close() throws IOException { metricsTelemetry.close(); diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java new file mode 100644 index 0000000000000..95ada626e21ee --- /dev/null +++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/Histogram.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.telemetry.metrics; + +import org.opensearch.common.annotation.ExperimentalApi; +import org.opensearch.telemetry.metrics.tags.Tags; + +/** + * Histogram records the value for an existing metric. + * {@opensearch.experimental} + */ +@ExperimentalApi +public interface Histogram { + + /** + * record value. + * @param value value to be added. + */ + void record(double value); + + /** + * record value along with the attributes. + * + * @param value value to be added. + * @param tags attributes/dimensions of the metric. + */ + void record(double value, Tags tags); + +} diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java index 61b3df089928b..94d19bda31f34 100644 --- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java +++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/MetricsRegistry.java @@ -36,4 +36,15 @@ public interface MetricsRegistry extends Closeable { * @return counter. */ Counter createUpDownCounter(String name, String description, String unit); + + /** + * Creates the histogram type of Metric. Implementation framework will take care + * of the bucketing strategy. + * + * @param name name of the histogram. + * @param description any description about the metric. + * @param unit unit of the metric. + * @return histogram. 
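+ *
+ * A usage sketch (the metric name, description and recorded value below are illustrative):
+ * <pre>{@code
+ * Histogram latency = metricsRegistry.createHistogram("request.latency", "per-request latency", "ms");
+ * latency.record(42.0);
+ * }</pre>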
+ */
+ Histogram createHistogram(String name, String description, String unit);
 }
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
new file mode 100644
index 0000000000000..20e72bccad899
--- /dev/null
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopHistogram.java
@@ -0,0 +1,38 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics.noop;
+
+import org.opensearch.common.annotation.InternalApi;
+import org.opensearch.telemetry.metrics.Histogram;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+/**
+ * No-op {@link Histogram}
+ * {@opensearch.internal}
+ */
+@InternalApi
+public class NoopHistogram implements Histogram {
+
+ /**
+ * No-op Histogram instance
+ */
+ public final static NoopHistogram INSTANCE = new NoopHistogram();
+
+ private NoopHistogram() {}
+
+ @Override
+ public void record(double value) {
+
+ }
+
+ @Override
+ public void record(double value, Tags tags) {
+
+ }
+}
diff --git a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
index 640c6842a8960..d3dda68cfae71 100644
--- a/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
+++ b/libs/telemetry/src/main/java/org/opensearch/telemetry/metrics/noop/NoopMetricsRegistry.java
@@ -10,6 +10,7 @@
 import org.opensearch.common.annotation.InternalApi;
 import org.opensearch.telemetry.metrics.Counter;
+import org.opensearch.telemetry.metrics.Histogram;
 import org.opensearch.telemetry.metrics.MetricsRegistry;
 
 import java.io.IOException;
@@ -38,6 +39,11 @@ public Counter createUpDownCounter(String name, String description, String unit) {
 return NoopCounter.INSTANCE;
 }
 
+ @Override
+ public Histogram createHistogram(String name, String description, String unit) {
+ return NoopHistogram.INSTANCE;
+ }
+
 @Override
 public void close() throws IOException {
 
diff --git a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
index 6171641db5f07..02f126075845b 100644
--- a/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
+++ b/libs/telemetry/src/test/java/org/opensearch/telemetry/metrics/DefaultMetricsRegistryTests.java
@@ -48,4 +48,15 @@ public void testUpDownCounter() {
 assertSame(mockCounter, counter);
 }
 
+ public void testHistogram() {
+ Histogram mockHistogram = mock(Histogram.class);
+ when(defaultMeterRegistry.createHistogram(any(String.class), any(String.class), any(String.class))).thenReturn(mockHistogram);
+ Histogram histogram = defaultMeterRegistry.createHistogram(
+ "org.opensearch.telemetry.metrics.DefaultMeterRegistryTests.testHistogram",
+ "test histogram",
+ "ms"
+ );
+ assertSame(mockHistogram, histogram);
+ }
+
 }
diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java
index bcdcb657c4f42..e77e69d121036
100644 --- a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java +++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsDisabledSanityIT.java @@ -15,6 +15,7 @@ import org.opensearch.telemetry.OTelTelemetrySettings; import org.opensearch.telemetry.TelemetrySettings; import org.opensearch.telemetry.metrics.noop.NoopCounter; +import org.opensearch.telemetry.metrics.noop.NoopHistogram; import org.opensearch.telemetry.metrics.noop.NoopMetricsRegistry; import org.opensearch.test.OpenSearchIntegTestCase; @@ -53,10 +54,13 @@ public void testSanityChecksWhenMetricsDisabled() throws Exception { Counter counter = metricsRegistry.createCounter("test-counter", "test", "1"); counter.add(1.0); + Histogram histogram = metricsRegistry.createHistogram("test-histogram", "test", "1"); + Thread.sleep(2000); assertTrue(metricsRegistry instanceof NoopMetricsRegistry); assertTrue(counter instanceof NoopCounter); + assertTrue(histogram instanceof NoopHistogram); } } diff --git a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java index ed341595d327d..1b8f694709a9c 100644 --- a/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java +++ b/plugins/telemetry-otel/src/internalClusterTest/java/org/opensearch/telemetry/metrics/TelemetryMetricsEnabledSanityIT.java @@ -23,6 +23,7 @@ import java.util.stream.Collectors; import io.opentelemetry.sdk.metrics.data.DoublePointData; +import io.opentelemetry.sdk.metrics.internal.data.ImmutableExponentialHistogramPointData; @OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.SUITE, minNumDataNodes = 1) public class TelemetryMetricsEnabledSanityIT extends OpenSearchIntegTestCase { @@ -92,6 +93,31 @@ public void testUpDownCounter() throws Exception { assertEquals(-1.0, value, 0.0); } + public void testHistogram() throws Exception { + MetricsRegistry metricsRegistry = internalCluster().getInstance(MetricsRegistry.class); + InMemorySingletonMetricsExporter.INSTANCE.reset(); + + Histogram histogram = metricsRegistry.createHistogram("test-histogram", "test", "ms"); + histogram.record(2.0); + histogram.record(1.0); + histogram.record(3.0); + // Sleep for about 2s to wait for metrics to be published. 
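+ // (The publish interval is wired from TelemetrySettings.METRICS_PUBLISH_INTERVAL_SETTING
+ // into the PeriodicMetricReader by OTelResourceProvider.)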
+ Thread.sleep(2000);
+
+ InMemorySingletonMetricsExporter exporter = InMemorySingletonMetricsExporter.INSTANCE;
+ ImmutableExponentialHistogramPointData histogramPointData = ((ImmutableExponentialHistogramPointData) ((ArrayList) exporter
+ .getFinishedMetricItems()
+ .stream()
+ .filter(a -> a.getName().contains("test-histogram"))
+ .collect(Collectors.toList())
+ .get(0)
+ .getExponentialHistogramData()
+ .getPoints()).get(0));
+ // The recorded values were 2.0, 1.0 and 3.0, so sum=6.0, max=3.0 and min=1.0.
+ assertEquals(6.0, histogramPointData.getSum(), 0.0);
+ assertEquals(3.0, histogramPointData.getMax(), 0.0);
+ assertEquals(1.0, histogramPointData.getMin(), 0.0);
+ }
+
 @After
 public void reset() {
 InMemorySingletonMetricsExporter.INSTANCE.reset();
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelHistogram.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelHistogram.java
new file mode 100644
index 0000000000000..73bb0d8adff62
--- /dev/null
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelHistogram.java
@@ -0,0 +1,40 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.telemetry.metrics;
+
+import org.opensearch.telemetry.OTelAttributesConverter;
+import org.opensearch.telemetry.metrics.tags.Tags;
+
+import io.opentelemetry.api.metrics.DoubleHistogram;
+
+/**
+ * OTel aware implementation {@link Histogram}
+ */
+class OTelHistogram implements Histogram {
+
+ private final DoubleHistogram otelDoubleHistogram;
+
+ /**
+ * Constructor
+ * @param otelDoubleCounter delegate histogram.
+ */
+ public OTelHistogram(DoubleHistogram otelDoubleCounter) {
+ this.otelDoubleHistogram = otelDoubleCounter;
+ }
+
+ @Override
+ public void record(double value) {
+ otelDoubleHistogram.record(value);
+ }
+
+ @Override
+ public void record(double value, Tags tags) {
+ otelDoubleHistogram.record(value, OTelAttributesConverter.convert(tags));
+ }
+}
diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
index af235b9604669..82ae2cdd198b2 100644
--- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
+++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetry.java
@@ -17,6 +17,7 @@
 import java.security.PrivilegedAction;
 
 import io.opentelemetry.api.metrics.DoubleCounter;
+import io.opentelemetry.api.metrics.DoubleHistogram;
 import io.opentelemetry.api.metrics.DoubleUpDownCounter;
 import io.opentelemetry.api.metrics.Meter;
 import io.opentelemetry.api.metrics.MeterProvider;
@@ -68,6 +69,23 @@ public Counter createUpDownCounter(String name, String description, String unit) {
 return new OTelUpDownCounter(doubleUpDownCounter);
 }
 
+ /**
+ * Creates the OTel histogram. In {@link org.opensearch.telemetry.tracing.OTelResourceProvider}
+ * the bucketing/aggregation strategy can be configured through views. The default strategy configured
+ * is {@link io.opentelemetry.sdk.metrics.internal.view.Base2ExponentialHistogramAggregation}.
+ * @param name name of the histogram.
+ * @param description any description about the metric.
+ * @param unit unit of the metric.
+ * @return histogram + */ + @Override + public Histogram createHistogram(String name, String description, String unit) { + DoubleHistogram doubleHistogram = AccessController.doPrivileged( + (PrivilegedAction<DoubleHistogram>) () -> otelMeter.histogramBuilder(name).setUnit(unit).setDescription(description).build() + ); + return new OTelHistogram(doubleHistogram); + } + @Override public void close() throws IOException { meterProvider.close(); diff --git a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java index b1a45f2e7c2d2..39b97b7d6441a 100644 --- a/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java +++ b/plugins/telemetry-otel/src/main/java/org/opensearch/telemetry/tracing/OTelResourceProvider.java @@ -23,8 +23,12 @@ import io.opentelemetry.api.trace.propagation.W3CTraceContextPropagator; import io.opentelemetry.context.propagation.ContextPropagators; import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.metrics.InstrumentSelector; +import io.opentelemetry.sdk.metrics.InstrumentType; import io.opentelemetry.sdk.metrics.SdkMeterProvider; +import io.opentelemetry.sdk.metrics.View; import io.opentelemetry.sdk.metrics.export.PeriodicMetricReader; +import io.opentelemetry.sdk.metrics.internal.view.Base2ExponentialHistogramAggregation; import io.opentelemetry.sdk.resources.Resource; import io.opentelemetry.sdk.trace.SdkTracerProvider; import io.opentelemetry.sdk.trace.export.BatchSpanProcessor; @@ -40,6 +44,7 @@ * This class encapsulates all OpenTelemetry related resources */ public final class OTelResourceProvider { + private OTelResourceProvider() {} /** @@ -92,6 +97,10 @@ private static SdkMeterProvider createSdkMetricProvider(Settings settings, Resou .setInterval(TelemetrySettings.METRICS_PUBLISH_INTERVAL_SETTING.get(settings).getSeconds(), TimeUnit.SECONDS) .build() ) + .registerView( + InstrumentSelector.builder().setType(InstrumentType.HISTOGRAM).build(), + View.builder().setAggregation(Base2ExponentialHistogramAggregation.getDefault()).build() + ) .build(); } diff --git a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java index 9de575b69774a..4b39e3d0d607d 100644 --- a/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java +++ b/plugins/telemetry-otel/src/test/java/org/opensearch/telemetry/metrics/OTelMetricsTelemetryTests.java @@ -17,12 +17,15 @@ import io.opentelemetry.api.OpenTelemetry; import io.opentelemetry.api.metrics.DoubleCounter; import io.opentelemetry.api.metrics.DoubleCounterBuilder; +import io.opentelemetry.api.metrics.DoubleHistogram; +import io.opentelemetry.api.metrics.DoubleHistogramBuilder; import io.opentelemetry.api.metrics.DoubleUpDownCounter; import io.opentelemetry.api.metrics.DoubleUpDownCounterBuilder; import io.opentelemetry.api.metrics.LongCounterBuilder; import io.opentelemetry.api.metrics.LongUpDownCounterBuilder; import io.opentelemetry.api.metrics.Meter; import io.opentelemetry.api.metrics.MeterProvider; +import org.mockito.Mockito; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -118,4 +121,32 @@ public void testUpDownCounter() { counter.add(-2.0, tags); verify(mockOTelUpDownDoubleCounter).add((-2.0), 
OTelAttributesConverter.convert(tags)); } + + @SuppressWarnings({ "rawtypes", "unchecked" }) + public void testHistogram() { + String histogramName = "test-histogram"; + String description = "test"; + String unit = "1"; + Meter mockMeter = mock(Meter.class); + OpenTelemetry mockOpenTelemetry = mock(OpenTelemetry.class); + DoubleHistogram mockOTelDoubleHistogram = mock(DoubleHistogram.class); + DoubleHistogramBuilder mockOTelDoubleHistogramBuilder = mock(DoubleHistogramBuilder.class); + MeterProvider meterProvider = mock(MeterProvider.class); + when(meterProvider.get(OTelTelemetryPlugin.INSTRUMENTATION_SCOPE_NAME)).thenReturn(mockMeter); + MetricsTelemetry metricsTelemetry = new OTelMetricsTelemetry( + new RefCountedReleasable("telemetry", mockOpenTelemetry, () -> {}), + meterProvider + ); + when(mockMeter.histogramBuilder(Mockito.contains(histogramName))).thenReturn(mockOTelDoubleHistogramBuilder); + when(mockOTelDoubleHistogramBuilder.setDescription(description)).thenReturn(mockOTelDoubleHistogramBuilder); + when(mockOTelDoubleHistogramBuilder.setUnit(unit)).thenReturn(mockOTelDoubleHistogramBuilder); + when(mockOTelDoubleHistogramBuilder.build()).thenReturn(mockOTelDoubleHistogram); + + Histogram histogram = metricsTelemetry.createHistogram(histogramName, description, unit); + histogram.record(1.0); + verify(mockOTelDoubleHistogram).record(1.0); + Tags tags = Tags.create().addTag("test", "test"); + histogram.record(2.0, tags); + verify(mockOTelDoubleHistogram).record(2.0, OTelAttributesConverter.convert(tags)); + } } diff --git a/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java b/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java index dda413ce2818e..44daf1b1554e0 100644 --- a/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java +++ b/test/framework/src/main/java/org/opensearch/test/telemetry/MockTelemetry.java @@ -11,8 +11,10 @@ import org.opensearch.telemetry.Telemetry; import org.opensearch.telemetry.TelemetrySettings; import org.opensearch.telemetry.metrics.Counter; +import org.opensearch.telemetry.metrics.Histogram; import org.opensearch.telemetry.metrics.MetricsTelemetry; import org.opensearch.telemetry.metrics.noop.NoopCounter; +import org.opensearch.telemetry.metrics.noop.NoopHistogram; import org.opensearch.telemetry.tracing.TracingTelemetry; import org.opensearch.test.telemetry.tracing.MockTracingTelemetry; @@ -46,6 +48,11 @@ public Counter createUpDownCounter(String name, String description, String unit) return NoopCounter.INSTANCE; } + @Override + public Histogram createHistogram(String name, String description, String unit) { + return NoopHistogram.INSTANCE; + } + @Override public void close() { From 3cbf54e7358fd707a0d73e6c016b04ce78884d30 Mon Sep 17 00:00:00 2001 From: Chenyang Ji <cyji@amazon.com> Date: Mon, 5 Feb 2024 18:10:06 -0800 Subject: [PATCH 174/178] Query insights plugin implementation (#11903) * Query insights plugin implementation Signed-off-by: Chenyang Ji <cyji@amazon.com> * Increase JavaDoc coverage and update PR based comments Signed-off-by: Chenyang Ji <cyji@amazon.com> * Refactor record and service to make them generic Signed-off-by: Chenyang Ji <cyji@amazon.com> * refactor service for improving multithreading efficiency Signed-off-by: Chenyang Ji <cyji@amazon.com> --------- Signed-off-by: Chenyang Ji <cyji@amazon.com> --- CHANGELOG.md | 1 + plugins/query-insights/build.gradle | 18 ++ .../plugin/insights/QueryInsightsPlugin.java | 112 +++++++ 
.../core/service/QueryInsightsService.java | 180 +++++++++++ .../core/service/TopQueriesService.java | 282 ++++++++++++++++++ .../insights/core/service/package-info.java | 12 + .../plugin/insights/package-info.java | 12 + .../insights/rules/model/Attribute.java | 74 +++++ .../insights/rules/model/MetricType.java | 121 ++++++++ .../rules/model/SearchQueryRecord.java | 176 +++++++++++ .../insights/rules/model/package-info.java | 12 + .../settings/QueryInsightsSettings.java | 116 +++++++ .../insights/settings/package-info.java | 12 + .../insights/QueryInsightsPluginTests.java | 91 ++++++ .../insights/QueryInsightsTestUtils.java | 155 ++++++++++ .../service/QueryInsightsServiceTests.java | 49 +++ .../core/service/TopQueriesServiceTests.java | 102 +++++++ .../rules/model/SearchQueryRecordTests.java | 71 +++++ .../action/search/SearchRequest.java | 2 +- .../action/search/SearchRequestContext.java | 6 +- .../SearchRequestOperationsListener.java | 22 +- .../action/search/SearchRequestSlowLog.java | 10 +- .../action/search/SearchRequestStats.java | 6 +- ...erationsCompositeListenerFactoryTests.java | 6 +- 24 files changed, 1622 insertions(+), 26 deletions(-) create mode 100644 plugins/query-insights/build.gradle create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/package-info.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/package-info.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecord.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/package-info.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/package-info.java create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/TopQueriesServiceTests.java create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index 50a1a6ea99243..5afdf6b707d1e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -223,6 +223,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Extract cluster management for integration tests into JUnit test rule out of OpenSearchIntegTestCase ([#11877](https://github.com/opensearch-project/OpenSearch/pull/11877)), 
([#12000](https://github.com/opensearch-project/OpenSearch/pull/12000)) - Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968)) - Updates IpField to be searchable when only `doc_values` are enabled ([#11508](https://github.com/opensearch-project/OpenSearch/pull/11508)) +- [Query Insights] Query Insights Framework which currently supports retrieving the most time-consuming queries within the last configured time window ([#11903](https://github.com/opensearch-project/OpenSearch/pull/11903)) ### Deprecated diff --git a/plugins/query-insights/build.gradle b/plugins/query-insights/build.gradle new file mode 100644 index 0000000000000..eabbd395bd3bd --- /dev/null +++ b/plugins/query-insights/build.gradle @@ -0,0 +1,18 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +opensearchplugin { + description 'OpenSearch Query Insights Plugin.' + classname 'org.opensearch.plugin.insights.QueryInsightsPlugin' +} + +dependencies { +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java new file mode 100644 index 0000000000000..0acd7e2dcac0d --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java @@ -0,0 +1,112 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.insights; + +import org.opensearch.action.ActionRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.metadata.IndexNameExpressionResolver; +import org.opensearch.cluster.node.DiscoveryNodes; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.IndexScopedSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.settings.SettingsFilter; +import org.opensearch.common.unit.TimeValue; +import org.opensearch.common.util.concurrent.OpenSearchExecutors; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.core.common.io.stream.NamedWriteableRegistry; +import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.env.Environment; +import org.opensearch.env.NodeEnvironment; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.plugins.ActionPlugin; +import org.opensearch.plugins.Plugin; +import org.opensearch.repositories.RepositoriesService; +import org.opensearch.rest.RestController; +import org.opensearch.rest.RestHandler; +import org.opensearch.script.ScriptService; +import org.opensearch.threadpool.ExecutorBuilder; +import org.opensearch.threadpool.ScalingExecutorBuilder; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.watcher.ResourceWatcherService; + +import java.util.Collection; +import java.util.List; +import java.util.function.Supplier; + +/** + * Plugin class for Query Insights. + */ +public class QueryInsightsPlugin extends Plugin implements ActionPlugin { + /** + * Default constructor + */ + public QueryInsightsPlugin() {} + + @Override + public Collection<Object> createComponents( + final Client client, + final ClusterService clusterService, + final ThreadPool threadPool, + final ResourceWatcherService resourceWatcherService, + final ScriptService scriptService, + final NamedXContentRegistry xContentRegistry, + final Environment environment, + final NodeEnvironment nodeEnvironment, + final NamedWriteableRegistry namedWriteableRegistry, + final IndexNameExpressionResolver indexNameExpressionResolver, + final Supplier<RepositoriesService> repositoriesServiceSupplier + ) { + // create top n queries service + final QueryInsightsService queryInsightsService = new QueryInsightsService(threadPool); + return List.of(queryInsightsService); + } + + @Override + public List<ExecutorBuilder<?>> getExecutorBuilders(final Settings settings) { + return List.of( + new ScalingExecutorBuilder( + QueryInsightsSettings.QUERY_INSIGHTS_EXECUTOR, + 1, + Math.min((OpenSearchExecutors.allocatedProcessors(settings) + 1) / 2, QueryInsightsSettings.MAX_THREAD_COUNT), + TimeValue.timeValueMinutes(5) + ) + ); + } + + @Override + public List<RestHandler> getRestHandlers( + final Settings settings, + final RestController restController, + final ClusterSettings clusterSettings, + final IndexScopedSettings indexScopedSettings, + final SettingsFilter settingsFilter, + final IndexNameExpressionResolver indexNameExpressionResolver, + final Supplier<DiscoveryNodes> nodesInCluster + ) { + return List.of(); + } + + @Override + public List<ActionHandler<? extends ActionRequest, ? 
extends ActionResponse>> getActions() {
+ return List.of();
+ }
+
+ @Override
+ public List<Setting<?>> getSettings() {
+ return List.of(
+ // Settings for top N queries
+ QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED,
+ QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE,
+ QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE
+ );
+ }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java
new file mode 100644
index 0000000000000..525ca0d4a3d33
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/QueryInsightsService.java
@@ -0,0 +1,180 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.service;
+
+import org.opensearch.common.inject.Inject;
+import org.opensearch.common.lifecycle.AbstractLifecycleComponent;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.threadpool.Scheduler;
+import org.opensearch.threadpool.ThreadPool;
+
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.LinkedBlockingQueue;
+
+/**
+ * Service responsible for gathering, analyzing, storing and exporting
+ * information related to search queries
+ *
+ * @opensearch.internal
+ */
+public class QueryInsightsService extends AbstractLifecycleComponent {
+ /**
+ * The internal OpenSearch thread pool that executes async processing and exporting tasks
+ */
+ private final ThreadPool threadPool;
+
+ /**
+ * Services to capture top n queries for different metric types
+ */
+ private final Map<MetricType, TopQueriesService> topQueriesServices;
+
+ /**
+ * Flags for enabling insight data collection for different metric types
+ */
+ private final Map<MetricType, Boolean> enableCollect;
+
+ /**
+ * The internal thread-safe queue to ingest the search query data and subsequently forward to processors
+ */
+ private final LinkedBlockingQueue<SearchQueryRecord> queryRecordsQueue;
+
+ /**
+ * Holds a reference to delayed operation {@link Scheduler.Cancellable} so it can be cancelled when
+ * the service is closed concurrently.
+ */
+ protected volatile Scheduler.Cancellable scheduledFuture;
+
+ /**
+ * Constructor of the QueryInsightsService
+ *
+ * @param threadPool The OpenSearch thread pool to run async tasks
+ */
+ @Inject
+ public QueryInsightsService(final ThreadPool threadPool) {
+ enableCollect = new HashMap<>();
+ queryRecordsQueue = new LinkedBlockingQueue<>(QueryInsightsSettings.QUERY_RECORD_QUEUE_CAPACITY);
+ topQueriesServices = new HashMap<>();
+ for (MetricType metricType : MetricType.allMetricTypes()) {
+ enableCollect.put(metricType, false);
+ topQueriesServices.put(metricType, new TopQueriesService(metricType));
+ }
+ this.threadPool = threadPool;
+ }
+
+ /**
+ * Ingest the query data into in-memory stores
+ *
+ * @param record the record to ingest
+ * @return true if the record was successfully queued for processing, false otherwise
+ */
+ public boolean addRecord(final SearchQueryRecord record) {
+ boolean shouldAdd = false;
+ for (Map.Entry<MetricType, TopQueriesService> entry : topQueriesServices.entrySet()) {
+ if (!enableCollect.get(entry.getKey())) {
+ continue;
+ }
+ List<SearchQueryRecord> currentSnapshot = entry.getValue().getTopQueriesCurrentSnapshot();
+ // skip adding to the top N queries store if the incoming record is smaller than the Nth record
+ if (currentSnapshot.size() < entry.getValue().getTopNSize()
+ || SearchQueryRecord.compare(record, currentSnapshot.get(0), entry.getKey()) > 0) {
+ shouldAdd = true;
+ break;
+ }
+ }
+ if (shouldAdd) {
+ return queryRecordsQueue.offer(record);
+ }
+ return false;
+ }
+
+ /**
+ * Drain the queryRecordsQueue into internal stores and services
+ */
+ public void drainRecords() {
+ final List<SearchQueryRecord> records = new ArrayList<>();
+ queryRecordsQueue.drainTo(records);
+ records.sort(Comparator.comparingLong(SearchQueryRecord::getTimestamp));
+ for (MetricType metricType : MetricType.allMetricTypes()) {
+ if (enableCollect.get(metricType)) {
+ // ingest the records into topQueriesService
+ topQueriesServices.get(metricType).consumeRecords(records);
+ }
+ }
+ }
+
+ /**
+ * Get the top queries service based on metricType
+ * @param metricType {@link MetricType}
+ * @return {@link TopQueriesService}
+ */
+ public TopQueriesService getTopQueriesService(final MetricType metricType) {
+ return topQueriesServices.get(metricType);
+ }
+
+ /**
+ * Set flag to enable or disable Query Insights data collection
+ *
+ * @param metricType {@link MetricType}
+ * @param enable Flag to enable or disable Query Insights data collection
+ */
+ public void enableCollection(final MetricType metricType, final boolean enable) {
+ this.enableCollect.put(metricType, enable);
+ this.topQueriesServices.get(metricType).setEnabled(enable);
+ }
+
+ /**
+ * Return whether Query Insights data collection is enabled for a MetricType
+ *
+ * @param metricType {@link MetricType}
+ * @return true if Query Insights data collection is enabled for the given metric type
+ */
+ public boolean isCollectionEnabled(final MetricType metricType) {
+ return this.enableCollect.get(metricType);
+ }
+
+ /**
+ * Check if the query insights service is enabled
+ *
+ * @return true if the query insights service is enabled for at least one metric type
+ */
+ public boolean isEnabled() {
+ for (MetricType t : MetricType.allMetricTypes()) {
+ if (isCollectionEnabled(t)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ @Override
+ protected void doStart() {
+ if (isEnabled()) {
+ scheduledFuture = threadPool.scheduleWithFixedDelay(
+ this::drainRecords,
+ QueryInsightsSettings.QUERY_RECORD_QUEUE_DRAIN_INTERVAL,
+ QueryInsightsSettings.QUERY_INSIGHTS_EXECUTOR
+ );
+ }
+ }
+
+ @Override
+ protected void doStop() {
+ if (scheduledFuture != null) {
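+ // Cancel the periodic drain task that doStart() scheduled.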
+            scheduledFuture.cancel();
+        }
+    }
+
+    @Override
+    protected void doClose() {}
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java
new file mode 100644
index 0000000000000..d2c30cbdf98e7
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/TopQueriesService.java
@@ -0,0 +1,282 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.service;
+
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.temporal.ChronoUnit;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Locale;
+import java.util.PriorityQueue;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Service responsible for gathering and storing top N queries
+ * with high latency or resource usage
+ *
+ * @opensearch.internal
+ */
+public class TopQueriesService {
+    private boolean enabled;
+    /**
+     * The metric type to measure top n queries
+     */
+    private final MetricType metricType;
+    private int topNSize;
+    /**
+     * The window size to keep the top n queries
+     */
+    private TimeValue windowSize;
+    /**
+     * The current window start timestamp
+     */
+    private long windowStart;
+    /**
+     * The internal store that holds the top n queries insight data; a min-heap, so the smallest record sits at the head
+     */
+    private final PriorityQueue<SearchQueryRecord> topQueriesStore;
+
+    /**
+     * The AtomicReference of a snapshot of the current window top queries for getters to consume
+     */
+    private final AtomicReference<List<SearchQueryRecord>> topQueriesCurrentSnapshot;
+
+    /**
+     * The AtomicReference of a snapshot of the last window top queries for getters to consume
+     */
+    private final AtomicReference<List<SearchQueryRecord>> topQueriesHistorySnapshot;
+
+    TopQueriesService(final MetricType metricType) {
+        this.enabled = false;
+        this.metricType = metricType;
+        this.topNSize = QueryInsightsSettings.DEFAULT_TOP_N_SIZE;
+        this.windowSize = QueryInsightsSettings.DEFAULT_WINDOW_SIZE;
+        this.windowStart = -1L;
+        topQueriesStore = new PriorityQueue<>(topNSize, (a, b) -> SearchQueryRecord.compare(a, b, metricType));
+        topQueriesCurrentSnapshot = new AtomicReference<>(new ArrayList<>());
+        topQueriesHistorySnapshot = new AtomicReference<>(new ArrayList<>());
+    }
+
+    /**
+     * Set the top N size for the top queries service.
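+     * New sizes are expected to be validated first via {@link #validateTopNSize(int)} before being applied.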
+     *
+     * @param topNSize the top N size to set
+     */
+    public void setTopNSize(final int topNSize) {
+        this.topNSize = topNSize;
+    }
+
+    /**
+     * Get the current configured top n size
+     *
+     * @return top n size
+     */
+    public int getTopNSize() {
+        return topNSize;
+    }
+
+    /**
+     * Validate the top N size based on the internal constraints
+     *
+     * @param size the wanted top N size
+     */
+    public void validateTopNSize(final int size) {
+        if (size > QueryInsightsSettings.MAX_N_SIZE) {
+            throw new IllegalArgumentException(
+                "Top N size setting for ["
+                    + metricType
+                    + "]"
+                    + " should be smaller than max top N size ["
+                    + QueryInsightsSettings.MAX_N_SIZE
+                    + "], was ("
+                    + size
+                    + " > "
+                    + QueryInsightsSettings.MAX_N_SIZE
+                    + ")"
+            );
+        }
+    }
+
+    /**
+     * Set enable flag for the service
+     * @param enabled boolean
+     */
+    public void setEnabled(final boolean enabled) {
+        this.enabled = enabled;
+    }
+
+    /**
+     * Set the window size for top N queries service
+     *
+     * @param windowSize window size to set
+     */
+    public void setWindowSize(final TimeValue windowSize) {
+        this.windowSize = windowSize;
+        // reset the window start time since the window size has changed
+        this.windowStart = -1L;
+    }
+
+    /**
+     * Validate the window size based on internal constraints.
+     *
+     * @param windowSize the window size to validate
+     */
+    public void validateWindowSize(final TimeValue windowSize) {
+        if (windowSize.compareTo(QueryInsightsSettings.MAX_WINDOW_SIZE) > 0
+            || windowSize.compareTo(QueryInsightsSettings.MIN_WINDOW_SIZE) < 0) {
+            throw new IllegalArgumentException(
+                "Window size setting for ["
+                    + metricType
+                    + "]"
+                    + " should be between ["
+                    + QueryInsightsSettings.MIN_WINDOW_SIZE
+                    + ","
+                    + QueryInsightsSettings.MAX_WINDOW_SIZE
+                    + "], was ("
+                    + windowSize
+                    + ")"
+            );
+        }
+        if (!(QueryInsightsSettings.VALID_WINDOW_SIZES_IN_MINUTES.contains(windowSize) || windowSize.getMinutes() % 60 == 0)) {
+            throw new IllegalArgumentException(
+                "Window size setting for ["
+                    + metricType
+                    + "]"
+                    + " should be a multiple of 1 hour, or one of "
+                    + QueryInsightsSettings.VALID_WINDOW_SIZES_IN_MINUTES
+                    + ", was ("
+                    + windowSize
+                    + ")"
+            );
+        }
+    }
+
+    /**
+     * Get all top queries records that are in the current top n queries store.
+     * Optionally include top N records from the last window.
+     *
+     * By default, return the records in sorted order.
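+     * Sorting is descending by the metric value, so the most expensive queries come first.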
+     *
+     * @param includeLastWindow if the top N queries from the last window should be included
+     * @return List of the records that are in the query insight store
+     * @throws IllegalArgumentException if query insight is disabled in the cluster
+     */
+    public List<SearchQueryRecord> getTopQueriesRecords(final boolean includeLastWindow) throws IllegalArgumentException {
+        if (!enabled) {
+            throw new IllegalArgumentException(
+                String.format(Locale.ROOT, "Cannot get top n queries for [%s] when it is not enabled.", metricType.toString())
+            );
+        }
+        // read from window snapshots
+        final List<SearchQueryRecord> queries = new ArrayList<>(topQueriesCurrentSnapshot.get());
+        if (includeLastWindow) {
+            queries.addAll(topQueriesHistorySnapshot.get());
+        }
+        return Stream.of(queries)
+            .flatMap(Collection::stream)
+            .sorted((a, b) -> SearchQueryRecord.compare(a, b, metricType) * -1)
+            .collect(Collectors.toList());
+    }
+
+    /**
+     * Consume records to top queries stores
+     *
+     * @param records a list of {@link SearchQueryRecord}
+     */
+    void consumeRecords(final List<SearchQueryRecord> records) {
+        final long currentWindowStart = calculateWindowStart(System.currentTimeMillis());
+        List<SearchQueryRecord> recordsInLastWindow = new ArrayList<>();
+        List<SearchQueryRecord> recordsInThisWindow = new ArrayList<>();
+        for (SearchQueryRecord record : records) {
+            // skip the records that do not have the corresponding measurement
+            if (!record.getMeasurements().containsKey(metricType)) {
+                continue;
+            }
+            if (record.getTimestamp() < currentWindowStart) {
+                recordsInLastWindow.add(record);
+            } else {
+                recordsInThisWindow.add(record);
+            }
+        }
+        // add records in last window, if there are any, to the top n store
+        addToTopNStore(recordsInLastWindow);
+        // rotate window and reset window start if necessary
+        rotateWindowIfNecessary(currentWindowStart);
+        // add records in current window, if there are any, to the top n store
+        addToTopNStore(recordsInThisWindow);
+        // update the current window snapshot for getters to consume
+        final List<SearchQueryRecord> newSnapShot = new ArrayList<>(topQueriesStore);
+        newSnapShot.sort((a, b) -> SearchQueryRecord.compare(a, b, metricType));
+        topQueriesCurrentSnapshot.set(newSnapShot);
+    }
+
+    private void addToTopNStore(final List<SearchQueryRecord> records) {
+        topQueriesStore.addAll(records);
+        // evict the smallest element from the head to keep the priority queue at the fixed top N size
+        while (topQueriesStore.size() > topNSize) {
+            topQueriesStore.poll();
+        }
+    }
+
+    /**
+     * Reset the current window and rotate the data to history snapshot for top n queries.
+     * This function is invoked at most once per consumeRecords call
+     *
+     * @param newWindowStart the new windowStart to set to
+     */
+    private void rotateWindowIfNecessary(final long newWindowStart) {
+        // reset window if the current window is outdated
+        if (windowStart < newWindowStart) {
+            final List<SearchQueryRecord> history = new ArrayList<>();
+            // rotate the current window to history store only if the data belongs to the last window
+            if (windowStart == newWindowStart - windowSize.getMillis()) {
+                history.addAll(topQueriesStore);
+            }
+            topQueriesHistorySnapshot.set(history);
+            topQueriesStore.clear();
+            topQueriesCurrentSnapshot.set(new ArrayList<>());
+            windowStart = newWindowStart;
+        }
+    }
+
+    /**
+     * Calculate the window start for the given timestamp
+     *
+     * @param timestamp the given timestamp to calculate window start
+     * @return the window start timestamp, in epoch milliseconds
+     */
+    private long calculateWindowStart(final long timestamp) {
+        final LocalDateTime currentTime =
LocalDateTime.ofInstant(Instant.ofEpochMilli(timestamp), ZoneId.of("UTC")); + LocalDateTime windowStartTime = currentTime.truncatedTo(ChronoUnit.HOURS); + while (!windowStartTime.plusMinutes(windowSize.getMinutes()).isAfter(currentTime)) { + windowStartTime = windowStartTime.plusMinutes(windowSize.getMinutes()); + } + return windowStartTime.toInstant(ZoneOffset.UTC).getEpochSecond() * 1000; + } + + /** + * Get the current top queries snapshot from the AtomicReference. + * + * @return a list of {@link SearchQueryRecord} + */ + public List<SearchQueryRecord> getTopQueriesCurrentSnapshot() { + return topQueriesCurrentSnapshot.get(); + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/package-info.java new file mode 100644 index 0000000000000..5068f28234f6d --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/service/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Service Classes for Query Insights + */ +package org.opensearch.plugin.insights.core.service; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/package-info.java new file mode 100644 index 0000000000000..04d1f9bfff7e1 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Base Package of Query Insights + */ +package org.opensearch.plugin.insights; diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java new file mode 100644 index 0000000000000..c1d17edf9ff14 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/Attribute.java @@ -0,0 +1,74 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.insights.rules.model; + +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Locale; + +/** + * Valid attributes for a search query record + * + * @opensearch.internal + */ +public enum Attribute { + /** + * The search query type + */ + SEARCH_TYPE, + /** + * The search query source + */ + SOURCE, + /** + * Total shards queried + */ + TOTAL_SHARDS, + /** + * The indices involved + */ + INDICES, + /** + * The per phase level latency map for a search query + */ + PHASE_LATENCY_MAP, + /** + * The node id for this request + */ + NODE_ID; + + /** + * Read an Attribute from a StreamInput + * + * @param in the StreamInput to read from + * @return Attribute + * @throws IOException IOException + */ + static Attribute readFromStream(final StreamInput in) throws IOException { + return Attribute.valueOf(in.readString().toUpperCase(Locale.ROOT)); + } + + /** + * Write Attribute to a StreamOutput + * + * @param out the StreamOutput to write + * @param attribute the Attribute to write + * @throws IOException IOException + */ + static void writeTo(final StreamOutput out, final Attribute attribute) throws IOException { + out.writeString(attribute.toString()); + } + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java new file mode 100644 index 0000000000000..cdd090fbf4804 --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/MetricType.java @@ -0,0 +1,121 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.plugin.insights.rules.model;
+
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.Locale;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+/**
+ * Valid metric types for a search query record
+ *
+ * @opensearch.internal
+ */
+public enum MetricType implements Comparator<Number> {
+    /**
+     * Latency metric type
+     */
+    LATENCY,
+    /**
+     * CPU usage metric type
+     */
+    CPU,
+    /**
+     * JVM heap usage metric type
+     */
+    JVM;
+
+    /**
+     * Read a MetricType from a StreamInput
+     *
+     * @param in the StreamInput to read from
+     * @return MetricType
+     * @throws IOException IOException
+     */
+    public static MetricType readFromStream(final StreamInput in) throws IOException {
+        return fromString(in.readString());
+    }
+
+    /**
+     * Create MetricType from String
+     *
+     * @param metricType the String representation of MetricType
+     * @return MetricType
+     */
+    public static MetricType fromString(final String metricType) {
+        return MetricType.valueOf(metricType.toUpperCase(Locale.ROOT));
+    }
+
+    /**
+     * Write MetricType to a StreamOutput
+     *
+     * @param out the StreamOutput to write
+     * @param metricType the MetricType to write
+     * @throws IOException IOException
+     */
+    static void writeTo(final StreamOutput out, final MetricType metricType) throws IOException {
+        out.writeString(metricType.toString());
+    }
+
+    @Override
+    public String toString() {
+        return this.name().toLowerCase(Locale.ROOT);
+    }
+
+    /**
+     * Get all valid metrics
+     *
+     * @return A set that contains all valid MetricTypes
+     */
+    public static Set<MetricType> allMetricTypes() {
+        return Arrays.stream(values()).collect(Collectors.toSet());
+    }
+
+    /**
+     * Compare two numbers based on the metric type
+     *
+     * @param a the first Number to be compared.
+     * @param b the second Number to be compared.
+     * @return a negative integer, zero, or a positive integer as the first argument is less than, equal to, or greater than the second
+     */
+    public int compare(final Number a, final Number b) {
+        switch (this) {
+            case LATENCY:
+                return Long.compare(a.longValue(), b.longValue());
+            case JVM:
+            case CPU:
+                return Double.compare(a.doubleValue(), b.doubleValue());
+        }
+        return -1;
+    }
+
+    /**
+     * Parse a value with the correct type based on MetricType
+     *
+     * @param o the generic object to parse
+     * @return {@link Number}
+     */
+    Number parseValue(final Object o) {
+        switch (this) {
+            case LATENCY:
+                return (Long) o;
+            case JVM:
+            case CPU:
+                return (Double) o;
+            default:
+                return (Number) o;
+        }
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecord.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecord.java
new file mode 100644
index 0000000000000..060711edb5580
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecord.java
@@ -0,0 +1,176 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.model;
+
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.common.io.stream.Writeable;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * SearchQueryRecord represents a minimal atomic record stored in the Query Insight Framework,
+ * which contains extensive information related to a search query.
+ *
+ * @opensearch.internal
+ */
+public class SearchQueryRecord implements ToXContentObject, Writeable {
+    private final long timestamp;
+    private final Map<MetricType, Number> measurements;
+    private final Map<Attribute, Object> attributes;
+
+    /**
+     * Constructor of SearchQueryRecord
+     *
+     * @param in the StreamInput to read the SearchQueryRecord from
+     * @throws IOException IOException
+     * @throws ClassCastException ClassCastException
+     */
+    public SearchQueryRecord(final StreamInput in) throws IOException, ClassCastException {
+        this.timestamp = in.readLong();
+        measurements = new HashMap<>();
+        in.readMap(MetricType::readFromStream, StreamInput::readGenericValue)
+            .forEach(((metricType, o) -> measurements.put(metricType, metricType.parseValue(o))));
+        this.attributes = in.readMap(Attribute::readFromStream, StreamInput::readGenericValue);
+    }
+
+    /**
+     * Constructor of SearchQueryRecord
+     *
+     * @param timestamp The timestamp of the query.
+     * @param measurements A map of measurements associated with this query
+     * @param attributes A map of attributes associated with this query
+     */
+    public SearchQueryRecord(final long timestamp, Map<MetricType, Number> measurements, final Map<Attribute, Object> attributes) {
+        if (measurements == null) {
+            throw new IllegalArgumentException("Measurements cannot be null");
+        }
+        this.measurements = measurements;
+        this.attributes = attributes;
+        this.timestamp = timestamp;
+    }
+
+    /**
+     * Returns the observation time of the metric.
+     *
+     * @return the observation time in milliseconds
+     */
+    public long getTimestamp() {
+        return timestamp;
+    }
+
+    /**
+     * Returns the measurement associated with the specified name.
+     *
+     * @param name the name of the measurement
+     * @return the measurement object, or null if not found
+     */
+    public Number getMeasurement(final MetricType name) {
+        return measurements.get(name);
+    }
+
+    /**
+     * Returns a map of all the measurements associated with the metric.
+     *
+     * @return a map of measurement names to measurement objects
+     */
+    public Map<MetricType, Number> getMeasurements() {
+        return measurements;
+    }
+
+    /**
+     * Returns a map of the attributes associated with the metric.
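+     * The returned map is the live backing map, so updates made through {@link #addAttribute} are visible to callers.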
+     *
+     * @return a map of attribute keys to attribute values
+     */
+    public Map<Attribute, Object> getAttributes() {
+        return attributes;
+    }
+
+    /**
+     * Add an attribute to this record
+     *
+     * @param attribute attribute to add
+     * @param value the value associated with the attribute
+     */
+    public void addAttribute(final Attribute attribute, final Object value) {
+        attributes.put(attribute, value);
+    }
+
+    @Override
+    public XContentBuilder toXContent(final XContentBuilder builder, final ToXContent.Params params) throws IOException {
+        builder.startObject();
+        builder.field("timestamp", timestamp);
+        for (Map.Entry<Attribute, Object> entry : attributes.entrySet()) {
+            builder.field(entry.getKey().toString(), entry.getValue());
+        }
+        for (Map.Entry<MetricType, Number> entry : measurements.entrySet()) {
+            builder.field(entry.getKey().toString(), entry.getValue());
+        }
+        return builder.endObject();
+    }
+
+    /**
+     * Write a SearchQueryRecord to a StreamOutput
+     *
+     * @param out the StreamOutput to write
+     * @throws IOException IOException
+     */
+    @Override
+    public void writeTo(final StreamOutput out) throws IOException {
+        out.writeLong(timestamp);
+        out.writeMap(measurements, (stream, metricType) -> MetricType.writeTo(stream, metricType), StreamOutput::writeGenericValue);
+        out.writeMap(attributes, (stream, attribute) -> Attribute.writeTo(stream, attribute), StreamOutput::writeGenericValue);
+    }
+
+    /**
+     * Compare two SearchQueryRecords, based on the given MetricType
+     *
+     * @param a the first SearchQueryRecord to compare
+     * @param b the second SearchQueryRecord to compare
+     * @param metricType the MetricType to compare on
+     * @return 0 if the first SearchQueryRecord is numerically equal to the second SearchQueryRecord;
+     *        -1 if the first SearchQueryRecord is numerically less than the second SearchQueryRecord;
+     *         1 if the first SearchQueryRecord is numerically greater than the second SearchQueryRecord.
+     */
+    public static int compare(final SearchQueryRecord a, final SearchQueryRecord b, final MetricType metricType) {
+        return metricType.compare(a.getMeasurement(metricType), b.getMeasurement(metricType));
+    }
+
+    /**
+     * Check if a SearchQueryRecord is equal to another record. Measurements are compared by value,
+     * while attributes are only compared by size, since attribute values may hold arrays that do not
+     * implement deep equality.
+     *
+     * @param o the other SearchQueryRecord record
+     * @return true if the two records are considered equal, false otherwise.
+     */
+    @Override
+    public boolean equals(final Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (!(o instanceof SearchQueryRecord)) {
+            return false;
+        }
+        final SearchQueryRecord other = (SearchQueryRecord) o;
+        return timestamp == other.getTimestamp()
+            && measurements.equals(other.getMeasurements())
+            && attributes.size() == other.getAttributes().size();
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(timestamp, measurements, attributes);
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/package-info.java
new file mode 100644
index 0000000000000..c59ec1550f54b
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/model/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Data Models for Query Insight Records
+ */
+package org.opensearch.plugin.insights.rules.model;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java
new file mode 100644
index 0000000000000..52cc1fbde790f
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/QueryInsightsSettings.java
@@ -0,0 +1,116 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.settings;
+
+import org.opensearch.common.settings.Setting;
+import org.opensearch.common.unit.TimeValue;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Settings for Query Insights Plugin
+ *
+ * @opensearch.api
+ * @opensearch.experimental
+ */
+public class QueryInsightsSettings {
+    /**
+     * Executors settings
+     */
+    public static final String QUERY_INSIGHTS_EXECUTOR = "query_insights_executor";
+    /**
+     * Max number of threads
+     */
+    public static final int MAX_THREAD_COUNT = 5;
+    /**
+     * Max number of requests for the consumer to collect at one time
+     */
+    public static final int QUERY_RECORD_QUEUE_CAPACITY = 1000;
+    /**
+     * Time interval for record queue consumer to run
+     */
+    public static final TimeValue QUERY_RECORD_QUEUE_DRAIN_INTERVAL = new TimeValue(5, TimeUnit.SECONDS);
+    /**
+     * Maximum window size
+     */
+    public static final TimeValue MAX_WINDOW_SIZE = new TimeValue(1, TimeUnit.DAYS);
+    /**
+     * Minimum window size
+     */
+    public static final TimeValue MIN_WINDOW_SIZE = new TimeValue(1, TimeUnit.MINUTES);
+    /**
+     * Valid window sizes
+     */
+    public static final Set<TimeValue> VALID_WINDOW_SIZES_IN_MINUTES = new HashSet<>(
+        Arrays.asList(
+            new TimeValue(1, TimeUnit.MINUTES),
+            new TimeValue(5, TimeUnit.MINUTES),
+            new TimeValue(10, TimeUnit.MINUTES),
+            new TimeValue(30, TimeUnit.MINUTES)
+        )
+    );
+
+    /** Maximum N size for top N queries */
+    public static final int MAX_N_SIZE = 100;
+    /** Default window size in seconds to keep the top N queries with latency data in query insight store */
+    public static final TimeValue DEFAULT_WINDOW_SIZE = new TimeValue(60, TimeUnit.SECONDS);
+    /** Default top N size to keep the data in query insight store */
+    public static final int DEFAULT_TOP_N_SIZE = 3;
+    /**
+     * Query Insights base uri
+     */
+    public static final String PLUGINS_BASE_URI = "/_insights";
+
+    /**
+     * Settings for Top Queries
+     */
+    public static final String TOP_QUERIES_BASE_URI = PLUGINS_BASE_URI + "/top_queries";
+    /** Settings prefix for the top N queries feature */
+    public static final String TOP_N_QUERIES_SETTING_PREFIX = "search.insights.top_queries";
+    /** Settings prefix for the top N queries by latency feature */
+    public static final String TOP_N_LATENCY_QUERIES_PREFIX = TOP_N_QUERIES_SETTING_PREFIX + ".latency";
+    /**
+     * Boolean setting for enabling top queries by latency.
+     */
+    public static final Setting<Boolean> TOP_N_LATENCY_QUERIES_ENABLED = Setting.boolSetting(
+        TOP_N_LATENCY_QUERIES_PREFIX + ".enabled",
+        false,
+        Setting.Property.Dynamic,
+        Setting.Property.NodeScope
+    );
+
+    /**
+     * Int setting to define the top n size for top queries by latency.
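+     * Defaults to {@code DEFAULT_TOP_N_SIZE}; sizes above {@code MAX_N_SIZE} fail validation in {@code TopQueriesService#validateTopNSize}.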
+ */ + public static final Setting<Integer> TOP_N_LATENCY_QUERIES_SIZE = Setting.intSetting( + TOP_N_LATENCY_QUERIES_PREFIX + ".top_n_size", + DEFAULT_TOP_N_SIZE, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + + /** + * Time setting to define the window size in seconds for top queries by latency. + */ + public static final Setting<TimeValue> TOP_N_LATENCY_QUERIES_WINDOW_SIZE = Setting.positiveTimeSetting( + TOP_N_LATENCY_QUERIES_PREFIX + ".window_size", + DEFAULT_WINDOW_SIZE, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + /** + * Default constructor + */ + public QueryInsightsSettings() {} +} diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/package-info.java new file mode 100644 index 0000000000000..f3152bbf966cb --- /dev/null +++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/settings/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Settings for Query Insights Plugin + */ +package org.opensearch.plugin.insights.settings; diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java new file mode 100644 index 0000000000000..f6d95450855de --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java @@ -0,0 +1,91 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.plugin.insights; + +import org.opensearch.action.ActionRequest; +import org.opensearch.client.Client; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.core.action.ActionResponse; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.plugins.ActionPlugin; +import org.opensearch.rest.RestHandler; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.junit.Before; + +import java.util.Arrays; +import java.util.List; + +import static org.mockito.Mockito.mock; + +public class QueryInsightsPluginTests extends OpenSearchTestCase { + + private QueryInsightsPlugin queryInsightsPlugin; + + private final Client client = mock(Client.class); + private ClusterService clusterService; + private final ThreadPool threadPool = mock(ThreadPool.class); + + @Before + public void setup() { + queryInsightsPlugin = new QueryInsightsPlugin(); + Settings.Builder settingsBuilder = Settings.builder(); + Settings settings = settingsBuilder.build(); + ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE); + + clusterService = new ClusterService(settings, clusterSettings, threadPool); + + } + + public void testGetSettings() { + assertEquals( + Arrays.asList( + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED, + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE, + QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE + ), + queryInsightsPlugin.getSettings() + ); + } + + public void testCreateComponent() { + List<Object> components = (List<Object>) queryInsightsPlugin.createComponents( + client, + clusterService, + threadPool, + null, + null, + null, + null, + null, + null, + null, + null + ); + assertEquals(1, components.size()); + assertTrue(components.get(0) instanceof QueryInsightsService); + } + + public void testGetRestHandlers() { + List<RestHandler> components = queryInsightsPlugin.getRestHandlers(Settings.EMPTY, null, null, null, null, null, null); + assertEquals(0, components.size()); + } + + public void testGetActions() { + List<ActionPlugin.ActionHandler<? extends ActionRequest, ? extends ActionResponse>> components = queryInsightsPlugin.getActions(); + assertEquals(0, components.size()); + } + +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java new file mode 100644 index 0000000000000..04f49d5fbc7dd --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java @@ -0,0 +1,155 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.plugin.insights;
+
+import org.opensearch.action.search.SearchType;
+import org.opensearch.common.util.Maps;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.plugin.insights.rules.model.Attribute;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+
+import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLengthBetween;
+import static org.opensearch.test.OpenSearchTestCase.randomArray;
+import static org.opensearch.test.OpenSearchTestCase.randomDouble;
+import static org.opensearch.test.OpenSearchTestCase.randomIntBetween;
+import static org.opensearch.test.OpenSearchTestCase.randomLong;
+import static org.opensearch.test.OpenSearchTestCase.randomLongBetween;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+
+public final class QueryInsightsTestUtils {
+
+    public QueryInsightsTestUtils() {}
+
+    public static List<SearchQueryRecord> generateQueryInsightRecords(int count) {
+        return generateQueryInsightRecords(count, count, System.currentTimeMillis(), 0);
+    }
+
+    /**
+     * Creates a List of random Query Insight Records for testing purposes
+     */
+    public static List<SearchQueryRecord> generateQueryInsightRecords(int lower, int upper, long startTimeStamp, long interval) {
+        List<SearchQueryRecord> records = new ArrayList<>();
+        int countOfRecords = randomIntBetween(lower, upper);
+        long timestamp = startTimeStamp;
+        for (int i = 0; i < countOfRecords; ++i) {
+            Map<MetricType, Number> measurements = Map.of(
+                MetricType.LATENCY,
+                randomLongBetween(1000, 10000),
+                MetricType.CPU,
+                randomDouble(),
+                MetricType.JVM,
+                randomDouble()
+            );
+
+            Map<String, Long> phaseLatencyMap = new HashMap<>();
+            int countOfPhases = randomIntBetween(2, 5);
+            for (int j = 0; j < countOfPhases; ++j) {
+                phaseLatencyMap.put(randomAlphaOfLengthBetween(5, 10), randomLong());
+            }
+            Map<Attribute, Object> attributes = new HashMap<>();
+            attributes.put(Attribute.SEARCH_TYPE, SearchType.QUERY_THEN_FETCH.toString().toLowerCase(Locale.ROOT));
+            attributes.put(Attribute.SOURCE, "{\"size\":20}");
+            attributes.put(Attribute.TOTAL_SHARDS, randomIntBetween(1, 100));
+            attributes.put(Attribute.INDICES, randomArray(1, 3, Object[]::new, () -> randomAlphaOfLengthBetween(5, 10)));
+            attributes.put(Attribute.PHASE_LATENCY_MAP, phaseLatencyMap);
+
+            records.add(new SearchQueryRecord(timestamp, measurements, attributes));
+            timestamp += interval;
+        }
+        return records;
+    }
+
+    public static SearchQueryRecord createFixedSearchQueryRecord() {
+        long timestamp = 1706574180000L;
+        Map<MetricType, Number> measurements = Map.of(MetricType.LATENCY, 1L);
+
+        Map<String, Long> phaseLatencyMap = new HashMap<>();
+        Map<Attribute, Object> attributes = new HashMap<>();
+        attributes.put(Attribute.SEARCH_TYPE, SearchType.QUERY_THEN_FETCH.toString().toLowerCase(Locale.ROOT));
+
+        return new SearchQueryRecord(timestamp, measurements, attributes);
+    }
+
+    public static void compareJson(ToXContent param1, ToXContent param2) throws IOException {
+        if (param1 == null || param2 == null) {
+            assertNull(param1);
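+            // if either argument is null, both must be null for the comparison to pass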
assertNull(param2); + return; + } + + ToXContent.Params params = ToXContent.EMPTY_PARAMS; + XContentBuilder param1Builder = jsonBuilder(); + param1.toXContent(param1Builder, params); + + XContentBuilder param2Builder = jsonBuilder(); + param2.toXContent(param2Builder, params); + + assertEquals(param1Builder.toString(), param2Builder.toString()); + } + + @SuppressWarnings("unchecked") + public static boolean checkRecordsEquals(List<SearchQueryRecord> records1, List<SearchQueryRecord> records2) { + if (records1.size() != records2.size()) { + return false; + } + for (int i = 0; i < records1.size(); i++) { + if (!records1.get(i).equals(records2.get(i))) { + return false; + } + Map<Attribute, Object> attributes1 = records1.get(i).getAttributes(); + Map<Attribute, Object> attributes2 = records2.get(i).getAttributes(); + for (Map.Entry<Attribute, Object> entry : attributes1.entrySet()) { + Attribute attribute = entry.getKey(); + Object value = entry.getValue(); + if (!attributes2.containsKey(attribute)) { + return false; + } + if (value instanceof Object[] && !Arrays.deepEquals((Object[]) value, (Object[]) attributes2.get(attribute))) { + return false; + } else if (value instanceof Map + && !Maps.deepEquals((Map<Object, Object>) value, (Map<Object, Object>) attributes2.get(attribute))) { + return false; + } + } + } + return true; + } + + public static boolean checkRecordsEqualsWithoutOrder( + List<SearchQueryRecord> records1, + List<SearchQueryRecord> records2, + MetricType metricType + ) { + Set<SearchQueryRecord> set2 = new TreeSet<>((a, b) -> SearchQueryRecord.compare(a, b, metricType)); + set2.addAll(records2); + if (records1.size() != records2.size()) { + return false; + } + for (int i = 0; i < records1.size(); i++) { + if (!set2.contains(records1.get(i))) { + return false; + } + } + return true; + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java new file mode 100644 index 0000000000000..c29b48b9690d1 --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/QueryInsightsServiceTests.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.core.service; + +import org.opensearch.plugin.insights.QueryInsightsTestUtils; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.plugin.insights.rules.model.SearchQueryRecord; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.junit.Before; + +import static org.mockito.Mockito.mock; + +/** + * Unit Tests for {@link QueryInsightsService}. 
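+ * Collection is enabled for every metric type in setup, and drainRecords() is driven manually because the thread pool is mocked.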
+ */
+public class QueryInsightsServiceTests extends OpenSearchTestCase {
+    private final ThreadPool threadPool = mock(ThreadPool.class);
+    private QueryInsightsService queryInsightsService;
+
+    @Before
+    public void setup() {
+        queryInsightsService = new QueryInsightsService(threadPool);
+        queryInsightsService.enableCollection(MetricType.LATENCY, true);
+        queryInsightsService.enableCollection(MetricType.CPU, true);
+        queryInsightsService.enableCollection(MetricType.JVM, true);
+    }
+
+    public void testAddRecordToLimitAndDrain() {
+        SearchQueryRecord record = QueryInsightsTestUtils.generateQueryInsightRecords(1, 1, System.currentTimeMillis(), 0).get(0);
+        for (int i = 0; i < QueryInsightsSettings.QUERY_RECORD_QUEUE_CAPACITY; i++) {
+            assertTrue(queryInsightsService.addRecord(record));
+        }
+        // exceed capacity
+        assertFalse(queryInsightsService.addRecord(record));
+        queryInsightsService.drainRecords();
+        assertEquals(
+            QueryInsightsSettings.DEFAULT_TOP_N_SIZE,
+            queryInsightsService.getTopQueriesService(MetricType.LATENCY).getTopQueriesRecords(false).size()
+        );
+    }
+}
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/TopQueriesServiceTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/TopQueriesServiceTests.java
new file mode 100644
index 0000000000000..060df84a89485
--- /dev/null
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/service/TopQueriesServiceTests.java
@@ -0,0 +1,102 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.service;
+
+import org.opensearch.cluster.coordination.DeterministicTaskQueue;
+import org.opensearch.common.unit.TimeValue;
+import org.opensearch.plugin.insights.QueryInsightsTestUtils;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Unit Tests for {@link TopQueriesService}.
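+ * The service under test starts with an effectively unbounded top N size and window; individual tests tighten them as needed.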
+ */ +public class TopQueriesServiceTests extends OpenSearchTestCase { + private TopQueriesService topQueriesService; + + @Before + public void setup() { + topQueriesService = new TopQueriesService(MetricType.LATENCY); + topQueriesService.setTopNSize(Integer.MAX_VALUE); + topQueriesService.setWindowSize(new TimeValue(Long.MAX_VALUE)); + topQueriesService.setEnabled(true); + } + + public void testIngestQueryDataWithLargeWindow() { + final List<SearchQueryRecord> records = QueryInsightsTestUtils.generateQueryInsightRecords(10); + topQueriesService.consumeRecords(records); + assertTrue( + QueryInsightsTestUtils.checkRecordsEqualsWithoutOrder( + topQueriesService.getTopQueriesRecords(false), + records, + MetricType.LATENCY + ) + ); + } + + public void testRollingWindows() { + List<SearchQueryRecord> records; + // Create 5 records at Now - 10 minutes to make sure they belong to the last window + records = QueryInsightsTestUtils.generateQueryInsightRecords(5, 5, System.currentTimeMillis() - 1000 * 60 * 10, 0); + topQueriesService.setWindowSize(TimeValue.timeValueMinutes(10)); + topQueriesService.consumeRecords(records); + assertEquals(0, topQueriesService.getTopQueriesRecords(true).size()); + + // Create 10 records at now + 1 minute, to make sure they belong to the current window + records = QueryInsightsTestUtils.generateQueryInsightRecords(10, 10, System.currentTimeMillis() + 1000 * 60, 0); + topQueriesService.setWindowSize(TimeValue.timeValueMinutes(10)); + topQueriesService.consumeRecords(records); + assertEquals(10, topQueriesService.getTopQueriesRecords(true).size()); + } + + public void testSmallNSize() { + final List<SearchQueryRecord> records = QueryInsightsTestUtils.generateQueryInsightRecords(10); + topQueriesService.setTopNSize(1); + topQueriesService.consumeRecords(records); + assertEquals(1, topQueriesService.getTopQueriesRecords(false).size()); + } + + public void testValidateTopNSize() { + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.validateTopNSize(QueryInsightsSettings.MAX_N_SIZE + 1); }); + } + + public void testGetTopQueriesWhenNotEnabled() { + topQueriesService.setEnabled(false); + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.getTopQueriesRecords(false); }); + } + + public void testValidateWindowSize() { + assertThrows(IllegalArgumentException.class, () -> { + topQueriesService.validateWindowSize(new TimeValue(QueryInsightsSettings.MAX_WINDOW_SIZE.getSeconds() + 1, TimeUnit.SECONDS)); + }); + assertThrows(IllegalArgumentException.class, () -> { + topQueriesService.validateWindowSize(new TimeValue(QueryInsightsSettings.MIN_WINDOW_SIZE.getSeconds() - 1, TimeUnit.SECONDS)); + }); + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.validateWindowSize(new TimeValue(2, TimeUnit.DAYS)); }); + assertThrows(IllegalArgumentException.class, () -> { topQueriesService.validateWindowSize(new TimeValue(7, TimeUnit.MINUTES)); }); + } + + private static void runUntilTimeoutOrFinish(DeterministicTaskQueue deterministicTaskQueue, long duration) { + final long endTime = deterministicTaskQueue.getCurrentTimeMillis() + duration; + while (deterministicTaskQueue.getCurrentTimeMillis() < endTime + && (deterministicTaskQueue.hasRunnableTasks() || deterministicTaskQueue.hasDeferredTasks())) { + if (deterministicTaskQueue.hasDeferredTasks() && randomBoolean()) { + deterministicTaskQueue.advanceTime(); + } else if (deterministicTaskQueue.hasRunnableTasks()) { + deterministicTaskQueue.runRandomTask(); + } + } + } +} diff --git 
a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java
new file mode 100644
index 0000000000000..793d5878e2300
--- /dev/null
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/model/SearchQueryRecordTests.java
@@ -0,0 +1,71 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.model;
+
+import org.opensearch.common.io.stream.BytesStreamOutput;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.plugin.insights.QueryInsightsTestUtils;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * Granular tests for the {@link SearchQueryRecord} class.
+ */
+public class SearchQueryRecordTests extends OpenSearchTestCase {
+
+    /**
+     * Check that the serialization, deserialization and equals functions work as expected
+     */
+    public void testSerializationAndEquals() throws Exception {
+        List<SearchQueryRecord> records = QueryInsightsTestUtils.generateQueryInsightRecords(10);
+        List<SearchQueryRecord> copiedRecords = new ArrayList<>();
+        for (SearchQueryRecord record : records) {
+            copiedRecords.add(roundTripRecord(record));
+        }
+        assertTrue(QueryInsightsTestUtils.checkRecordsEquals(records, copiedRecords));
+    }
+
+    public void testAllMetricTypes() {
+        Set<MetricType> allMetrics = MetricType.allMetricTypes();
+        Set<MetricType> expected = new HashSet<>(Arrays.asList(MetricType.LATENCY, MetricType.CPU, MetricType.JVM));
+        assertEquals(expected, allMetrics);
+    }
+
+    public void testCompare() {
+        SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
+        SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
+        assertEquals(0, SearchQueryRecord.compare(record1, record2, MetricType.LATENCY));
+    }
+
+    public void testEqual() {
+        SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
+        SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
+        assertEquals(record1, record2);
+    }
+
+    /**
+     * Serialize and deserialize a SearchQueryRecord.
+     * @param record A SearchQueryRecord to serialize.
+     * @return The deserialized, "round-tripped" record.
+     */
+    private static SearchQueryRecord roundTripRecord(SearchQueryRecord record) throws Exception {
+        try (BytesStreamOutput out = new BytesStreamOutput()) {
+            record.writeTo(out);
+            try (StreamInput in = out.bytes().streamInput()) {
+                return new SearchQueryRecord(in);
+            }
+        }
+    }
+}
diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequest.java b/server/src/main/java/org/opensearch/action/search/SearchRequest.java
index 96cea17ff4972..f738c182c06da 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchRequest.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchRequest.java
@@ -359,7 +359,7 @@ boolean isFinalReduce() {
      * request. When created through {@link #subSearchRequest(SearchRequest, String[], String, long, boolean)}, this method returns
      * the provided current time, otherwise it will return {@link System#currentTimeMillis()}.
*/ - long getOrCreateAbsoluteStartMillis() { + public long getOrCreateAbsoluteStartMillis() { return absoluteStartMillis == DEFAULT_ABSOLUTE_START_MILLIS ? System.currentTimeMillis() : absoluteStartMillis; } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java index eceac7204b196..383d9b5e82fe2 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestContext.java @@ -22,7 +22,7 @@ * @opensearch.internal */ @InternalApi -class SearchRequestContext { +public class SearchRequestContext { private final SearchRequestOperationsListener searchRequestOperationsListener; private long absoluteStartNanos; private final Map<String, Long> phaseTookMap; @@ -47,7 +47,7 @@ void updatePhaseTookMap(String phaseName, Long tookTime) { this.phaseTookMap.put(phaseName, tookTime); } - Map<String, Long> phaseTookMap() { + public Map<String, Long> phaseTookMap() { return phaseTookMap; } @@ -70,7 +70,7 @@ void setAbsoluteStartNanos(long absoluteStartNanos) { /** * Request start time in nanos */ - long getAbsoluteStartNanos() { + public long getAbsoluteStartNanos() { return absoluteStartNanos; } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java index 2a09cc084f79f..2acb35af667f0 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestOperationsListener.java @@ -31,21 +31,21 @@ protected SearchRequestOperationsListener(final boolean enabled) { this.enabled = enabled; } - abstract void onPhaseStart(SearchPhaseContext context); + protected abstract void onPhaseStart(SearchPhaseContext context); - abstract void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext); + protected abstract void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext); - abstract void onPhaseFailure(SearchPhaseContext context); + protected abstract void onPhaseFailure(SearchPhaseContext context); - void onRequestStart(SearchRequestContext searchRequestContext) {} + protected void onRequestStart(SearchRequestContext searchRequestContext) {} - void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} + protected void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {} - boolean isEnabled(SearchRequest searchRequest) { + protected boolean isEnabled(SearchRequest searchRequest) { return isEnabled(); } - boolean isEnabled() { + protected boolean isEnabled() { return enabled; } @@ -69,7 +69,7 @@ static final class CompositeListener extends SearchRequestOperationsListener { } @Override - void onPhaseStart(SearchPhaseContext context) { + protected void onPhaseStart(SearchPhaseContext context) { for (SearchRequestOperationsListener listener : listeners) { try { listener.onPhaseStart(context); @@ -80,7 +80,7 @@ void onPhaseStart(SearchPhaseContext context) { } @Override - void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { + protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) { for (SearchRequestOperationsListener listener : listeners) { try { listener.onPhaseEnd(context, 
            searchRequestContext);
@@ -91,7 +91,7 @@ void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestCo
     }
 
     @Override
-    void onPhaseFailure(SearchPhaseContext context) {
+    protected void onPhaseFailure(SearchPhaseContext context) {
         for (SearchRequestOperationsListener listener : listeners) {
             try {
                 listener.onPhaseFailure(context);
@@ -102,7 +102,7 @@ void onPhaseFailure(SearchPhaseContext context) {
     }
 
     @Override
-    void onRequestStart(SearchRequestContext searchRequestContext) {
+    protected void onRequestStart(SearchRequestContext searchRequestContext) {
         for (SearchRequestOperationsListener listener : listeners) {
             try {
                 listener.onRequestStart(searchRequestContext);
diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java
index 7f25f9026f215..74e04d976cb1c 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchRequestSlowLog.java
@@ -134,19 +134,19 @@ public SearchRequestSlowLog(ClusterService clusterService) {
     }
 
     @Override
-    void onPhaseStart(SearchPhaseContext context) {}
+    protected void onPhaseStart(SearchPhaseContext context) {}
 
     @Override
-    void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {}
+    protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {}
 
     @Override
-    void onPhaseFailure(SearchPhaseContext context) {}
+    protected void onPhaseFailure(SearchPhaseContext context) {}
 
     @Override
-    void onRequestStart(SearchRequestContext searchRequestContext) {}
+    protected void onRequestStart(SearchRequestContext searchRequestContext) {}
 
     @Override
-    void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {
+    protected void onRequestEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {
         long tookInNanos = System.nanoTime() - searchRequestContext.getAbsoluteStartNanos();
 
         if (warnThreshold >= 0 && tookInNanos > warnThreshold && level.isLevelEnabledFor(SlowLogLevel.WARN)) {
diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java
index 88d599a0dcdaa..ac32b08afb7f6 100644
--- a/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java
+++ b/server/src/main/java/org/opensearch/action/search/SearchRequestStats.java
@@ -58,12 +58,12 @@ public long getPhaseMetric(SearchPhaseName searchPhaseName) {
     }
 
     @Override
-    void onPhaseStart(SearchPhaseContext context) {
+    protected void onPhaseStart(SearchPhaseContext context) {
         phaseStatsMap.get(context.getCurrentPhase().getSearchPhaseName()).current.inc();
     }
 
     @Override
-    void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {
+    protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {
         StatsHolder phaseStats = phaseStatsMap.get(context.getCurrentPhase().getSearchPhaseName());
         phaseStats.current.dec();
         phaseStats.total.inc();
@@ -71,7 +71,7 @@ void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestCo
     }
 
     @Override
-    void onPhaseFailure(SearchPhaseContext context) {
+    protected void onPhaseFailure(SearchPhaseContext context) {
         phaseStatsMap.get(context.getCurrentPhase().getSearchPhaseName()).current.dec();
     }
 
diff --git a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java
index 78c5ba4412c68..1cb336e18b12c 100644
--- a/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java
+++ b/server/src/test/java/org/opensearch/action/search/SearchRequestOperationsCompositeListenerFactoryTests.java
@@ -119,13 +119,13 @@ public void testStandardListenerAndPerRequestListenerDisabled() {
     public SearchRequestOperationsListener createTestSearchRequestOperationsListener() {
         return new SearchRequestOperationsListener() {
             @Override
-            void onPhaseStart(SearchPhaseContext context) {}
+            protected void onPhaseStart(SearchPhaseContext context) {}
 
             @Override
-            void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {}
+            protected void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {}
 
             @Override
-            void onPhaseFailure(SearchPhaseContext context) {}
+            protected void onPhaseFailure(SearchPhaseContext context) {}
         };
     }
 }

From 554cbf7bfd83d94e1b1b69528ce0d128454754f4 Mon Sep 17 00:00:00 2001
From: Chenyang Ji <cyji@amazon.com>
Date: Mon, 5 Feb 2024 22:18:08 -0800
Subject: [PATCH 175/178] Top N Queries by latency implementation (#11904)

* Top N Queries by latency implementation

* Increase JavaDoc coverage and update PR based comments

* Refactor record and service to make them generic

* refactor service for improving multithreading efficiency

* rebase from master to pick up query insights plugin changes

---------

Signed-off-by: Chenyang Ji <cyji@amazon.com>
---
 CHANGELOG.md                                   |   1 +
 .../QueryInsightsPluginTransportIT.java        | 274 ++++++++++++++++++
 .../plugin/insights/TopQueriesRestIT.java      | 107 +++++++
 .../plugin/insights/QueryInsightsPlugin.java   |  10 +-
 .../core/listener/QueryInsightsListener.java   | 147 ++++++++++
 .../insights/core/listener/package-info.java   |  12 +
 .../insights/rules/action/package-info.java    |  12 +
 .../rules/action/top_queries/TopQueries.java   |  77 +++++
 .../action/top_queries/TopQueriesAction.java   |  32 ++
 .../action/top_queries/TopQueriesRequest.java  |  62 ++++
 .../top_queries/TopQueriesResponse.java        | 143 +++++++++
 .../action/top_queries/package-info.java       |  12 +
 .../rules/resthandler/package-info.java        |  12 +
 .../top_queries/RestTopQueriesAction.java      |  99 +++++++
 .../resthandler/top_queries/package-info.java  |  12 +
 .../rules/transport/package-info.java          |  12 +
 .../TransportTopQueriesAction.java             | 155 ++++++++++
 .../transport/top_queries/package-info.java    |  12 +
 .../insights/QueryInsightsPluginTests.java     |  22 +-
 .../insights/QueryInsightsTestUtils.java       |  34 +++
 .../listener/QueryInsightsListenerTests.java   | 161 ++++++++++
 .../top_queries/TopQueriesRequestTests.java    |  43 +++
 .../top_queries/TopQueriesResponseTests.java   |  71 +++++
 .../action/top_queries/TopQueriesTests.java    |  35 +++
 .../RestTopQueriesActionTests.java             |  70 +++++
 .../TransportTopQueriesActionTests.java        |  84 ++++++
 26 files changed, 1705 insertions(+), 6 deletions(-)
 create mode 100644 plugins/query-insights/src/internalClusterTest/java/org/opensearch/plugin/insights/QueryInsightsPluginTransportIT.java
 create mode 100644 plugins/query-insights/src/javaRestTest/java/org/opensearch/plugin/insights/TopQueriesRestIT.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/package-info.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/package-info.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueries.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesAction.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequest.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponse.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/package-info.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/package-info.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesAction.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/package-info.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/package-info.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java
 create mode 100644 plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/package-info.java
 create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java
 create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequestTests.java
 create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponseTests.java
 create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesTests.java
 create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesActionTests.java
 create mode 100644 plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesActionTests.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5afdf6b707d1e..6b78222aa742f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -224,6 +224,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Workaround for https://bugs.openjdk.org/browse/JDK-8323659 regression, introduced in JDK-21.0.2 ([#11968](https://github.com/opensearch-project/OpenSearch/pull/11968))
 - Updates IpField to be searchable when only `doc_values` are enabled ([#11508](https://github.com/opensearch-project/OpenSearch/pull/11508))
 - [Query Insights] Query Insights Framework which currently supports retrieving the most time-consuming queries within the last configured time window ([#11903](https://github.com/opensearch-project/OpenSearch/pull/11903))
+- [Query Insights] Implement Top N Queries feature to collect and gather information about high latency queries in a window ([#11904](https://github.com/opensearch-project/OpenSearch/pull/11904))
 
 ### Deprecated
 
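As orientation before the new files: the Top N collection introduced by this patch is driven by dynamic cluster settings. A minimal sketch of switching it on from a transport client, mirroring the integration tests below; the `client` handle and a running cluster are assumed, the setting keys are the ones defined in QueryInsightsSettings:

    // Sketch (not part of the patch): enable Top N latency queries via persistent cluster settings.
    // ClusterUpdateSettingsRequest and Settings are the classes used by the tests in this patch.
    ClusterUpdateSettingsRequest enableTopN = new ClusterUpdateSettingsRequest().persistentSettings(
        Settings.builder()
            .put(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true")
            .put(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE.getKey(), "5")
            .put(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE.getKey(), "600s")
            .build()
    );
    client.admin().cluster().updateSettings(enableTopN).actionGet();
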
diff --git a/plugins/query-insights/src/internalClusterTest/java/org/opensearch/plugin/insights/QueryInsightsPluginTransportIT.java b/plugins/query-insights/src/internalClusterTest/java/org/opensearch/plugin/insights/QueryInsightsPluginTransportIT.java
new file mode 100644
index 0000000000000..04e715444f50a
--- /dev/null
+++ b/plugins/query-insights/src/internalClusterTest/java/org/opensearch/plugin/insights/QueryInsightsPluginTransportIT.java
@@ -0,0 +1,274 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights;
+
+import org.opensearch.action.admin.cluster.health.ClusterHealthResponse;
+import org.opensearch.action.admin.cluster.node.info.NodeInfo;
+import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest;
+import org.opensearch.action.admin.cluster.node.info.NodesInfoResponse;
+import org.opensearch.action.admin.cluster.node.info.PluginsAndModules;
+import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
+import org.opensearch.action.index.IndexResponse;
+import org.opensearch.action.search.SearchResponse;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.index.query.QueryBuilders;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugins.Plugin;
+import org.opensearch.plugins.PluginInfo;
+import org.opensearch.test.OpenSearchIntegTestCase;
+import org.junit.Assert;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE;
+import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
+
+/**
+ * Transport Action tests for Query Insights Plugin
+ */
+
+@OpenSearchIntegTestCase.ClusterScope(numDataNodes = 0, scope = OpenSearchIntegTestCase.Scope.TEST)
+public class QueryInsightsPluginTransportIT extends OpenSearchIntegTestCase {
+
+    private final int TOTAL_NUMBER_OF_NODES = 2;
+    private final int TOTAL_SEARCH_REQUESTS = 5;
+
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return Arrays.asList(QueryInsightsPlugin.class);
+    }
+
+    /**
+     * Test Query Insights Plugin is installed
+     */
+    public void testQueryInsightPluginInstalled() {
+        NodesInfoRequest nodesInfoRequest = new NodesInfoRequest();
+        nodesInfoRequest.addMetric(NodesInfoRequest.Metric.PLUGINS.metricName());
+        NodesInfoResponse nodesInfoResponse = OpenSearchIntegTestCase.client().admin().cluster().nodesInfo(nodesInfoRequest).actionGet();
+        List<PluginInfo> pluginInfos = nodesInfoResponse.getNodes()
+            .stream()
+            .flatMap(
+                (Function<NodeInfo, Stream<PluginInfo>>) nodeInfo -> nodeInfo.getInfo(PluginsAndModules.class).getPluginInfos().stream()
+            )
+            .collect(Collectors.toList());
+        Assert.assertTrue(
+            pluginInfos.stream().anyMatch(pluginInfo -> pluginInfo.getName().equals("org.opensearch.plugin.insights.QueryInsightsPlugin"))
+        );
+    }
+
+    /**
+     * Test get top queries when feature disabled
+     */
+    public void testGetTopQueriesWhenFeatureDisabled() {
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertNotEquals(0, response.failures().size());
+        Assert.assertEquals(
+            "Cannot get top n queries for [latency] when it is not enabled.",
+            response.failures().get(0).getCause().getCause().getMessage()
+        );
+    }
+
+    /**
+     * Test update top query record when feature disabled then enabled
+     */
+    public void testUpdateRecordWhenFeatureDisabledThenEnabled() throws ExecutionException, InterruptedException {
+        Settings commonSettings = Settings.builder().put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "false").build();
+
+        logger.info("--> starting nodes for query insight testing");
+        List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build());
+
+        logger.info("--> waiting for nodes to form a cluster");
+        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet();
+        assertFalse(health.isTimedOut());
+
+        assertAcked(
+            prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2))
+        );
+        ensureStableCluster(2);
+        logger.info("--> creating indices for query insight testing");
+        for (int i = 0; i < 5; i++) {
+            IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get();
+            assertEquals("CREATED", response.status().toString());
+        }
+        // making search requests to get top queries
+        for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) {
+            SearchResponse searchResponse = internalCluster().client(randomFrom(nodes))
+                .prepareSearch()
+                .setQuery(QueryBuilders.matchAllQuery())
+                .get();
+            assertEquals(searchResponse.getFailedShards(), 0);
+        }
+
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertNotEquals(0, response.failures().size());
+        Assert.assertEquals(
+            "Cannot get top n queries for [latency] when it is not enabled.",
+            response.failures().get(0).getCause().getCause().getMessage()
+        );
+
+        ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest().persistentSettings(
+            Settings.builder().put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true").build()
+        );
+        assertAcked(internalCluster().client().admin().cluster().updateSettings(updateSettingsRequest).get());
+        TopQueriesRequest request2 = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response2 = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request2).actionGet();
+        Assert.assertEquals(0, response2.failures().size());
+        Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response2.getNodes().size());
+        for (int i = 0; i < TOTAL_NUMBER_OF_NODES; i++) {
+            Assert.assertEquals(0, response2.getNodes().get(i).getTopQueriesRecord().size());
+        }
+
+        internalCluster().stopAllNodes();
+    }
+
+    /**
+     * Test get top queries when feature enabled
+     */
+    public void testGetTopQueriesWhenFeatureEnabled() throws InterruptedException {
+        Settings commonSettings = Settings.builder()
+            .put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true")
+            .put(TOP_N_LATENCY_QUERIES_SIZE.getKey(), "100")
+            .put(TOP_N_LATENCY_QUERIES_WINDOW_SIZE.getKey(), "600s")
+            .build();
+
+        logger.info("--> starting nodes for query insight testing");
+        List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build());
+
+        logger.info("--> waiting for nodes to form a cluster");
+        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet();
+        assertFalse(health.isTimedOut());
+
+        assertAcked(
+            prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2))
+        );
+        ensureStableCluster(2);
+        logger.info("--> creating indices for query insight testing");
+        for (int i = 0; i < 5; i++) {
+            IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get();
+            assertEquals("CREATED", response.status().toString());
+        }
+        // making search requests to get top queries
+        for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) {
+            SearchResponse searchResponse = internalCluster().client(randomFrom(nodes))
+                .prepareSearch()
+                .setQuery(QueryBuilders.matchAllQuery())
+                .get();
+            assertEquals(searchResponse.getFailedShards(), 0);
+        }
+        // Sleep to wait for queue drained to top queries store
+        Thread.sleep(6000);
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertEquals(0, response.failures().size());
+        Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response.getNodes().size());
+        Assert.assertEquals(TOTAL_SEARCH_REQUESTS, response.getNodes().stream().mapToInt(o -> o.getTopQueriesRecord().size()).sum());
+
+        internalCluster().stopAllNodes();
+    }
+
+    /**
+     * Test get top queries with small top n size
+     */
+    public void testGetTopQueriesWithSmallTopN() throws InterruptedException {
+        Settings commonSettings = Settings.builder()
+            .put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true")
+            .put(TOP_N_LATENCY_QUERIES_SIZE.getKey(), "1")
+            .put(TOP_N_LATENCY_QUERIES_WINDOW_SIZE.getKey(), "600s")
+            .build();
+
+        logger.info("--> starting nodes for query insight testing");
+        List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build());
+
+        logger.info("--> waiting for nodes to form a cluster");
+        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet();
+        assertFalse(health.isTimedOut());
+
+        assertAcked(
+            prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2))
+        );
+        ensureStableCluster(2);
+        logger.info("--> creating indices for query insight testing");
+        for (int i = 0; i < 5; i++) {
+            IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get();
+            assertEquals("CREATED", response.status().toString());
+        }
+        // making search requests to get top queries
+        for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) {
+            SearchResponse searchResponse = internalCluster().client(randomFrom(nodes))
+                .prepareSearch()
+                .setQuery(QueryBuilders.matchAllQuery())
+                .get();
+            assertEquals(searchResponse.getFailedShards(), 0);
+        }
+        Thread.sleep(6000);
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertEquals(0, response.failures().size());
+        Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response.getNodes().size());
+        Assert.assertEquals(2, response.getNodes().stream().mapToInt(o -> o.getTopQueriesRecord().size()).sum());
+
+        internalCluster().stopAllNodes();
+    }
+
+    /**
+     * Test get top queries with small window size
+     */
+    public void testGetTopQueriesWithSmallWindowSize() throws InterruptedException {
+        Settings commonSettings = Settings.builder()
+            .put(TOP_N_LATENCY_QUERIES_ENABLED.getKey(), "true")
+            .put(TOP_N_LATENCY_QUERIES_SIZE.getKey(), "100")
+            .put(TOP_N_LATENCY_QUERIES_WINDOW_SIZE.getKey(), "1m")
+            .build();
+
+        logger.info("--> starting nodes for query insight testing");
+        List<String> nodes = internalCluster().startNodes(TOTAL_NUMBER_OF_NODES, Settings.builder().put(commonSettings).build());
+
+        logger.info("--> waiting for nodes to form a cluster");
+        ClusterHealthResponse health = client().admin().cluster().prepareHealth().setWaitForNodes("2").execute().actionGet();
+        assertFalse(health.isTimedOut());
+
+        assertAcked(
+            prepareCreate("test").setSettings(Settings.builder().put("index.number_of_shards", 2).put("index.number_of_replicas", 2))
+        );
+        ensureStableCluster(2);
+        logger.info("--> creating indices for query insight testing");
+        for (int i = 0; i < 5; i++) {
+            IndexResponse response = client().prepareIndex("test_" + i).setId("" + i).setSource("field_" + i, "value_" + i).get();
+            assertEquals("CREATED", response.status().toString());
+        }
+        // making search requests to get top queries
+        for (int i = 0; i < TOTAL_SEARCH_REQUESTS; i++) {
+            SearchResponse searchResponse = internalCluster().client(randomFrom(nodes))
+                .prepareSearch()
+                .setQuery(QueryBuilders.matchAllQuery())
+                .get();
+            assertEquals(searchResponse.getFailedShards(), 0);
+        }
+
+        TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
+        TopQueriesResponse response = OpenSearchIntegTestCase.client().execute(TopQueriesAction.INSTANCE, request).actionGet();
+        Assert.assertEquals(0, response.failures().size());
+        Assert.assertEquals(TOTAL_NUMBER_OF_NODES, response.getNodes().size());
+        Thread.sleep(6000);
+        internalCluster().stopAllNodes();
+    }
+}
diff --git a/plugins/query-insights/src/javaRestTest/java/org/opensearch/plugin/insights/TopQueriesRestIT.java b/plugins/query-insights/src/javaRestTest/java/org/opensearch/plugin/insights/TopQueriesRestIT.java
new file mode 100644
index 0000000000000..57dea6ad8d5ff
--- /dev/null
+++ b/plugins/query-insights/src/javaRestTest/java/org/opensearch/plugin/insights/TopQueriesRestIT.java
@@ -0,0 +1,107 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights;
+
+import org.opensearch.client.Request;
+import org.opensearch.client.Response;
+import org.opensearch.common.xcontent.LoggingDeprecationHandler;
+import org.opensearch.common.xcontent.json.JsonXContent;
+import org.opensearch.core.xcontent.NamedXContentRegistry;
+import org.opensearch.test.rest.OpenSearchRestTestCase;
+import org.junit.Assert;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Rest Action tests for Query Insights
+ */
+public class TopQueriesRestIT extends OpenSearchRestTestCase {
+
+    /**
+     * test Query Insights is installed
+     * @throws IOException IOException
+     */
+    @SuppressWarnings("unchecked")
+    public void testQueryInsightsPluginInstalled() throws IOException {
+        Request request = new Request("GET", "/_cat/plugins?s=component&h=name,component,version,description&format=json");
+        Response response = client().performRequest(request);
+        List<Object> pluginsList = JsonXContent.jsonXContent.createParser(
+            NamedXContentRegistry.EMPTY,
+            LoggingDeprecationHandler.INSTANCE,
+            response.getEntity().getContent()
+        ).list();
+        Assert.assertTrue(
+            pluginsList.stream().map(o -> (Map<String, Object>) o).anyMatch(plugin -> plugin.get("component").equals("query-insights"))
+        );
+    }
+
+    /**
+     * test enabling top queries
+     * @throws IOException IOException
+     */
+    public void testTopQueriesResponses() throws IOException {
+        // Enable Top N Queries feature
+        Request request = new Request("PUT", "/_cluster/settings");
+        request.setJsonEntity(defaultTopQueriesSettings());
+        Response response = client().performRequest(request);
+
+        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
+
+        // Create documents for search
+        request = new Request("POST", "/my-index-0/_doc");
+        request.setJsonEntity(createDocumentsBody());
+        response = client().performRequest(request);
+
+        Assert.assertEquals(201, response.getStatusLine().getStatusCode());
+
+        // Do Search
+        request = new Request("GET", "/my-index-0/_search?size=20&pretty");
+        request.setJsonEntity(searchBody());
+        response = client().performRequest(request);
+        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
+        response = client().performRequest(request);
+        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
+
+        // Get Top Queries
+        request = new Request("GET", "/_insights/top_queries?pretty");
+        response = client().performRequest(request);
+
+        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
+        String top_requests = new String(response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8);
+        Assert.assertTrue(top_requests.contains("top_queries"));
+        Assert.assertEquals(2, top_requests.split("searchType", -1).length - 1);
+    }
+
+    private String defaultTopQueriesSettings() {
+        return "{\n"
+            + "    \"persistent\" : {\n"
+            + "        \"search.top_n_queries.latency.enabled\" : \"true\",\n"
+            + "        \"search.top_n_queries.latency.window_size\" : \"600s\",\n"
+            + "        \"search.top_n_queries.latency.top_n_size\" : 5\n"
+            + "    }\n"
+            + "}";
+    }
+
+    private String createDocumentsBody() {
+        return "{\n"
+            + "  \"@timestamp\": \"2099-11-15T13:12:00\",\n"
+            + "  \"message\": \"this is document 1\",\n"
+            + "  \"user\": {\n"
+            + "    \"id\": \"cyji\"\n"
+            + "  }\n"
+            + "}";
+    }
+
+    private String searchBody() {
+        return "{}";
+    }
+}
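Distilling the REST test above into the smallest retrieval round trip; a sketch assuming the low-level REST client available through OpenSearchRestTestCase's client():

    // Sketch: fetch the current Top N queries over REST; the metric type defaults to latency.
    Request topQueries = new Request("GET", "/_insights/top_queries");
    Response topQueriesResponse = client().performRequest(topQueries);
    String body = new String(topQueriesResponse.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8);
    // The payload carries a "top_queries" array of per-query records merged across nodes.
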
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java
index 0acd7e2dcac0d..4d7e0d486068a 100644
--- a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/QueryInsightsPlugin.java
@@ -25,7 +25,11 @@
 import org.opensearch.core.xcontent.NamedXContentRegistry;
 import org.opensearch.env.Environment;
 import org.opensearch.env.NodeEnvironment;
+import org.opensearch.plugin.insights.core.listener.QueryInsightsListener;
 import org.opensearch.plugin.insights.core.service.QueryInsightsService;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction;
+import org.opensearch.plugin.insights.rules.resthandler.top_queries.RestTopQueriesAction;
+import org.opensearch.plugin.insights.rules.transport.top_queries.TransportTopQueriesAction;
 import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
 import org.opensearch.plugins.ActionPlugin;
 import org.opensearch.plugins.Plugin;
@@ -67,7 +71,7 @@ public Collection<Object> createComponents(
     ) {
         // create top n queries service
         final QueryInsightsService queryInsightsService = new QueryInsightsService(threadPool);
-        return List.of(queryInsightsService);
+        return List.of(queryInsightsService, new QueryInsightsListener(clusterService, queryInsightsService));
     }
 
     @Override
@@ -92,12 +96,12 @@ public List<RestHandler> getRestHandlers(
         final IndexNameExpressionResolver indexNameExpressionResolver,
         final Supplier<DiscoveryNodes> nodesInCluster
     ) {
-        return List.of();
+        return List.of(new RestTopQueriesAction());
     }
 
     @Override
     public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
-        return List.of();
+        return List.of(new ActionPlugin.ActionHandler<>(TopQueriesAction.INSTANCE, TransportTopQueriesAction.class));
     }
 
     @Override
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java
new file mode 100644
index 0000000000000..705273f52a567
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListener.java
@@ -0,0 +1,147 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.listener;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.opensearch.action.search.SearchPhaseContext;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchRequestContext;
+import org.opensearch.action.search.SearchRequestOperationsListener;
+import org.opensearch.cluster.service.ClusterService;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.plugin.insights.core.service.QueryInsightsService;
+import org.opensearch.plugin.insights.rules.model.Attribute;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE;
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE;
+
+/**
+ * The listener for query insights services.
+ * It forwards query-related data to the appropriate query insights stores,
+ * either for each request or for each phase.
+ *
+ * @opensearch.internal
+ */
+public final class QueryInsightsListener extends SearchRequestOperationsListener {
+    private static final ToXContent.Params FORMAT_PARAMS = new ToXContent.MapParams(Collections.singletonMap("pretty", "false"));
+
+    private static final Logger log = LogManager.getLogger(QueryInsightsListener.class);
+
+    private final QueryInsightsService queryInsightsService;
+
+    /**
+     * Constructor for QueryInsightsListener
+     *
+     * @param clusterService The Node's cluster service.
+     * @param queryInsightsService The queryInsightsService associated with this listener
+     */
+    @Inject
+    public QueryInsightsListener(final ClusterService clusterService, final QueryInsightsService queryInsightsService) {
+        this.queryInsightsService = queryInsightsService;
+        clusterService.getClusterSettings()
+            .addSettingsUpdateConsumer(TOP_N_LATENCY_QUERIES_ENABLED, v -> this.setEnableTopQueries(MetricType.LATENCY, v));
+        clusterService.getClusterSettings()
+            .addSettingsUpdateConsumer(
+                TOP_N_LATENCY_QUERIES_SIZE,
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).setTopNSize(v),
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).validateTopNSize(v)
+            );
+        clusterService.getClusterSettings()
+            .addSettingsUpdateConsumer(
+                TOP_N_LATENCY_QUERIES_WINDOW_SIZE,
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).setWindowSize(v),
+                v -> this.queryInsightsService.getTopQueriesService(MetricType.LATENCY).validateWindowSize(v)
+            );
+        this.setEnableTopQueries(MetricType.LATENCY, clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_ENABLED));
+        this.queryInsightsService.getTopQueriesService(MetricType.LATENCY)
+            .setTopNSize(clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_SIZE));
+        this.queryInsightsService.getTopQueriesService(MetricType.LATENCY)
+            .setWindowSize(clusterService.getClusterSettings().get(TOP_N_LATENCY_QUERIES_WINDOW_SIZE));
+    }
+
+    /**
+     * Enable or disable top queries insights collection for {@link MetricType}
+     * This function will enable or disable the corresponding listeners
+     * and query insights services.
+     *
+     * @param metricType {@link MetricType}
+     * @param enabled boolean
+     */
+    public void setEnableTopQueries(final MetricType metricType, final boolean enabled) {
+        boolean isAllMetricsDisabled = !queryInsightsService.isEnabled();
+        this.queryInsightsService.enableCollection(metricType, enabled);
+        if (!enabled) {
+            // disable the QueryInsightsListener only if all metric collections are now disabled.
+            if (!queryInsightsService.isEnabled()) {
+                super.setEnabled(false);
+                this.queryInsightsService.stop();
+            }
+        } else {
+            super.setEnabled(true);
+            // restart the QueryInsightsListener only if no metric collection was enabled before.
+            if (isAllMetricsDisabled) {
+                this.queryInsightsService.stop();
+                this.queryInsightsService.start();
+            }
+        }
+
+    }
+
+    @Override
+    public boolean isEnabled() {
+        return super.isEnabled();
+    }
+
+    @Override
+    public void onPhaseStart(SearchPhaseContext context) {}
+
+    @Override
+    public void onPhaseEnd(SearchPhaseContext context, SearchRequestContext searchRequestContext) {}
+
+    @Override
+    public void onPhaseFailure(SearchPhaseContext context) {}
+
+    @Override
+    public void onRequestStart(SearchRequestContext searchRequestContext) {}
+
+    @Override
+    public void onRequestEnd(final SearchPhaseContext context, final SearchRequestContext searchRequestContext) {
+        final SearchRequest request = context.getRequest();
+        try {
+            Map<MetricType, Number> measurements = new HashMap<>();
+            if (queryInsightsService.isCollectionEnabled(MetricType.LATENCY)) {
+                measurements.put(
+                    MetricType.LATENCY,
+                    TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - searchRequestContext.getAbsoluteStartNanos())
+                );
+            }
+            Map<Attribute, Object> attributes = new HashMap<>();
+            attributes.put(Attribute.SEARCH_TYPE, request.searchType().toString().toLowerCase(Locale.ROOT));
+            attributes.put(Attribute.SOURCE, request.source().toString(FORMAT_PARAMS));
+            attributes.put(Attribute.TOTAL_SHARDS, context.getNumShards());
+            attributes.put(Attribute.INDICES, request.indices());
+            attributes.put(Attribute.PHASE_LATENCY_MAP, searchRequestContext.phaseTookMap());
+            SearchQueryRecord record = new SearchQueryRecord(request.getOrCreateAbsoluteStartMillis(), measurements, attributes);
+            queryInsightsService.addRecord(record);
+        } catch (Exception e) {
+            log.error(String.format(Locale.ROOT, "failed to ingest query insights data, error: %s", e));
+        }
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/package-info.java
new file mode 100644
index 0000000000000..3cb9cacf7fd1c
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/core/listener/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Listeners for Query Insights
+ */
+package org.opensearch.plugin.insights.core.listener;
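For orientation, the record assembled in onRequestEnd above has the following shape. A sketch with illustrative values, using the model classes the listener references; only the constructor and addRecord call are taken from the code above:

    // Sketch: the SearchQueryRecord handed to QueryInsightsService.addRecord(...).
    Map<MetricType, Number> measurements = new HashMap<>();
    measurements.put(MetricType.LATENCY, 42L);                  // illustrative took-time in ms
    Map<Attribute, Object> attributes = new HashMap<>();
    attributes.put(Attribute.SEARCH_TYPE, "query_then_fetch");  // illustrative value
    attributes.put(Attribute.TOTAL_SHARDS, 10);                 // illustrative value
    SearchQueryRecord record = new SearchQueryRecord(System.currentTimeMillis(), measurements, attributes);
    queryInsightsService.addRecord(record);
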
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/package-info.java
new file mode 100644
index 0000000000000..9b6b5856f7d27
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Transport Actions, Requests and Responses for Query Insights
+ */
+package org.opensearch.plugin.insights.rules.action;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueries.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueries.java
new file mode 100644
index 0000000000000..26cff82aae52e
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueries.java
@@ -0,0 +1,77 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.action.top_queries;
+
+import org.opensearch.action.support.nodes.BaseNodeResponse;
+import org.opensearch.cluster.node.DiscoveryNode;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.xcontent.ToXContentObject;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * Holds all top queries records by resource usage or latency on a node
+ * Mainly used in the top N queries node response workflow.
+ *
+ * @opensearch.internal
+ */
+public class TopQueries extends BaseNodeResponse implements ToXContentObject {
+    /** The store to keep the top queries records */
+    private final List<SearchQueryRecord> topQueriesRecords;
+
+    /**
+     * Create the TopQueries Object from StreamInput
+     * @param in A {@link StreamInput} object.
+     * @throws IOException IOException
+     */
+    public TopQueries(final StreamInput in) throws IOException {
+        super(in);
+        topQueriesRecords = in.readList(SearchQueryRecord::new);
+    }
+
+    /**
+     * Create the TopQueries Object
+     * @param node A node that is part of the cluster.
+     * @param searchQueryRecords A list of SearchQueryRecord associated with this TopQueries.
+     */
+    public TopQueries(final DiscoveryNode node, final List<SearchQueryRecord> searchQueryRecords) {
+        super(node);
+        topQueriesRecords = searchQueryRecords;
+    }
+
+    @Override
+    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
+        if (topQueriesRecords != null) {
+            for (SearchQueryRecord record : topQueriesRecords) {
+                record.toXContent(builder, params);
+            }
+        }
+        return builder;
+    }
+
+    @Override
+    public void writeTo(final StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeList(topQueriesRecords);
+
+    }
+
+    /**
+     * Get all top queries records
+     *
+     * @return the top queries records in this node response
+     */
+    public List<SearchQueryRecord> getTopQueriesRecord() {
+        return topQueriesRecords;
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesAction.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesAction.java
new file mode 100644
index 0000000000000..b8ed69fa5692b
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesAction.java
@@ -0,0 +1,32 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.action.top_queries;
+
+import org.opensearch.action.ActionType;
+
+/**
+ * Transport action for cluster/node level top queries information.
+ *
+ * @opensearch.internal
+ */
+public class TopQueriesAction extends ActionType<TopQueriesResponse> {
+
+    /**
+     * The TopQueriesAction Instance.
+     */
+    public static final TopQueriesAction INSTANCE = new TopQueriesAction();
+    /**
+     * The name of this Action
+     */
+    public static final String NAME = "cluster:admin/opensearch/insights/top_queries";
+
+    private TopQueriesAction() {
+        super(NAME, TopQueriesResponse::new);
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequest.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequest.java
new file mode 100644
index 0000000000000..3bdff2c403161
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequest.java
@@ -0,0 +1,62 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.action.top_queries;
+
+import org.opensearch.action.support.nodes.BaseNodesRequest;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+
+import java.io.IOException;
+
+/**
+ * A request to get cluster/node level top queries information.
+ *
+ * @opensearch.internal
+ */
+public class TopQueriesRequest extends BaseNodesRequest<TopQueriesRequest> {
+
+    final MetricType metricType;
+
+    /**
+     * Constructor for TopQueriesRequest
+     *
+     * @param in A {@link StreamInput} object.
+     * @throws IOException if the stream cannot be deserialized.
+     */
+    public TopQueriesRequest(final StreamInput in) throws IOException {
+        super(in);
+        this.metricType = MetricType.readFromStream(in);
+    }
+
+    /**
+     * Get top queries from nodes based on the nodes ids specified.
+     * If none are passed, cluster level top queries will be returned.
+     *
+     * @param metricType {@link MetricType}
+     * @param nodesIds the nodeIds specified in the request
+     */
+    public TopQueriesRequest(final MetricType metricType, final String... nodesIds) {
+        super(nodesIds);
+        this.metricType = metricType;
+    }
+
+    /**
+     * Get the type of requested metrics
+     */
+    public MetricType getMetricType() {
+        return metricType;
+    }
+
+    @Override
+    public void writeTo(final StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(metricType.toString());
+    }
+}
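A sketch of driving the request/action pair above from the transport layer, following the pattern used by the integration tests earlier; a cluster `client` handle is assumed:

    // Cluster-level Top N by latency: passing no node ids targets every node.
    TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY);
    TopQueriesResponse response = client.execute(TopQueriesAction.INSTANCE, request).actionGet();
    // Node-filtered variant: new TopQueriesRequest(MetricType.LATENCY, "nodeId1", "nodeId2")
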
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponse.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponse.java
new file mode 100644
index 0000000000000..2e66bb7f77baf
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponse.java
@@ -0,0 +1,143 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.action.top_queries;
+
+import org.opensearch.action.FailedNodeException;
+import org.opensearch.action.support.nodes.BaseNodesResponse;
+import org.opensearch.cluster.ClusterName;
+import org.opensearch.common.xcontent.XContentFactory;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.core.xcontent.ToXContentFragment;
+import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.plugin.insights.rules.model.Attribute;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.stream.Collectors;
+
+/**
+ * Transport response for cluster/node level top queries information.
+ *
+ * @opensearch.internal
+ */
+public class TopQueriesResponse extends BaseNodesResponse<TopQueries> implements ToXContentFragment {
+
+    private static final String CLUSTER_LEVEL_RESULTS_KEY = "top_queries";
+    private final MetricType metricType;
+    private final int top_n_size;
+
+    /**
+     * Constructor for TopQueriesResponse.
+     *
+     * @param in A {@link StreamInput} object.
+     * @throws IOException if the stream cannot be deserialized.
+     */
+    public TopQueriesResponse(final StreamInput in) throws IOException {
+        super(in);
+        top_n_size = in.readInt();
+        metricType = in.readEnum(MetricType.class);
+    }
+
+    /**
+     * Constructor for TopQueriesResponse
+     *
+     * @param clusterName The current cluster name
+     * @param nodes A list that contains top queries results from all nodes
+     * @param failures A list that contains FailedNodeException
+     * @param top_n_size The top N size to return to the user
+     * @param metricType the {@link MetricType} to be returned in this response
+     */
+    public TopQueriesResponse(
+        final ClusterName clusterName,
+        final List<TopQueries> nodes,
+        final List<FailedNodeException> failures,
+        final int top_n_size,
+        final MetricType metricType
+    ) {
+        super(clusterName, nodes, failures);
+        this.top_n_size = top_n_size;
+        this.metricType = metricType;
+    }
+
+    @Override
+    protected List<TopQueries> readNodesFrom(final StreamInput in) throws IOException {
+        return in.readList(TopQueries::new);
+    }
+
+    @Override
+    protected void writeNodesTo(final StreamOutput out, final List<TopQueries> nodes) throws IOException {
+        out.writeList(nodes);
+        out.writeInt(top_n_size);
+        out.writeEnum(metricType);
+    }
+
+    @Override
+    public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
+        final List<TopQueries> results = getNodes();
+        postProcess(results);
+        builder.startObject();
+        toClusterLevelResult(builder, params, results);
+        return builder.endObject();
+    }
+
+    @Override
+    public String toString() {
+        try {
+            final XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
+            builder.startObject();
+            this.toXContent(builder, EMPTY_PARAMS);
+            builder.endObject();
+            return builder.toString();
+        } catch (IOException e) {
+            return "{ \"error\" : \"" + e.getMessage() + "\"}";
+        }
+    }
+
+    /**
+     * Post process the top queries results to add customized attributes
+     *
+     * @param results the top queries results
+     */
+    private void postProcess(final List<TopQueries> results) {
+        for (TopQueries topQueries : results) {
+            final String nodeId = topQueries.getNode().getId();
+            for (SearchQueryRecord record : topQueries.getTopQueriesRecord()) {
+                record.addAttribute(Attribute.NODE_ID, nodeId);
+            }
+        }
+    }
+
+    /**
+     * Merge top n queries results from nodes into cluster level results in XContent format.
+     *
+     * @param builder XContent builder
+     * @param params serialization parameters
+     * @param results top queries results from all nodes
+     * @throws IOException if an error occurs
+     */
+    private void toClusterLevelResult(final XContentBuilder builder, final Params params, final List<TopQueries> results)
+        throws IOException {
+        final List<SearchQueryRecord> all_records = results.stream()
+            .map(TopQueries::getTopQueriesRecord)
+            .flatMap(Collection::stream)
+            .sorted((a, b) -> SearchQueryRecord.compare(a, b, metricType) * -1)
+            .limit(top_n_size)
+            .collect(Collectors.toList());
+        builder.startArray(CLUSTER_LEVEL_RESULTS_KEY);
+        for (SearchQueryRecord record : all_records) {
+            record.toXContent(builder, params);
+        }
+        builder.endArray();
+    }
+
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/package-info.java
new file mode 100644
index 0000000000000..3cc7900e5ce7d
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/action/top_queries/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Transport Actions, Requests and Responses for Top N Queries
+ */
+package org.opensearch.plugin.insights.rules.action.top_queries;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/package-info.java
new file mode 100644
index 0000000000000..3787f05f65552
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Rest Handlers for Query Insights
+ */
+package org.opensearch.plugin.insights.rules.resthandler;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesAction.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesAction.java
new file mode 100644
index 0000000000000..6aa511c626ab1
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesAction.java
@@ -0,0 +1,99 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.resthandler.top_queries;
+
+import org.opensearch.client.node.NodeClient;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.core.common.Strings;
+import org.opensearch.core.rest.RestStatus;
+import org.opensearch.core.xcontent.ToXContent;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.rest.BaseRestHandler;
+import org.opensearch.rest.BytesRestResponse;
+import org.opensearch.rest.RestChannel;
+import org.opensearch.rest.RestRequest;
+import org.opensearch.rest.RestResponse;
+import org.opensearch.rest.action.RestResponseListener;
+
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.opensearch.plugin.insights.settings.QueryInsightsSettings.TOP_QUERIES_BASE_URI;
+import static org.opensearch.rest.RestRequest.Method.GET;
+
+/**
+ * Rest action to get Top N queries by certain metric type
+ *
+ * @opensearch.api
+ */
+public class RestTopQueriesAction extends BaseRestHandler {
+    /** The metric types that are allowed in top N queries */
+    static final Set<String> ALLOWED_METRICS = MetricType.allMetricTypes().stream().map(MetricType::toString).collect(Collectors.toSet());
+
+    /**
+     * Constructor for RestTopQueriesAction
+     */
+    public RestTopQueriesAction() {}
+
+    @Override
+    public List<Route> routes() {
+        return List.of(
+            new Route(GET, TOP_QUERIES_BASE_URI),
+            new Route(GET, String.format(Locale.ROOT, "%s/{nodeId}", TOP_QUERIES_BASE_URI))
+        );
+    }
+
+    @Override
+    public String getName() {
+        return "query_insights_top_queries_action";
+    }
+
+    @Override
+    public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) {
+        final TopQueriesRequest topQueriesRequest = prepareRequest(request);
+        topQueriesRequest.timeout(request.param("timeout"));
+
+        return channel -> client.execute(TopQueriesAction.INSTANCE, topQueriesRequest, topQueriesResponse(channel));
+    }
+
+    static TopQueriesRequest prepareRequest(final RestRequest request) {
+        final String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId"));
+        final String metricType = request.param("type", MetricType.LATENCY.toString());
+        if (!ALLOWED_METRICS.contains(metricType)) {
+            throw new IllegalArgumentException(
+                String.format(Locale.ROOT, "request [%s] contains invalid metric type [%s]", request.path(), metricType)
+            );
+        }
+        return new TopQueriesRequest(MetricType.fromString(metricType), nodesIds);
+    }
+
+    @Override
+    protected Set<String> responseParams() {
+        return Settings.FORMAT_PARAMS;
+    }
+
+    @Override
+    public boolean canTripCircuitBreaker() {
+        return false;
+    }
+
+    private RestResponseListener<TopQueriesResponse> topQueriesResponse(final RestChannel channel) {
+        return new RestResponseListener<>(channel) {
+            @Override
+            public RestResponse buildResponse(final TopQueriesResponse response) throws Exception {
+                return new BytesRestResponse(RestStatus.OK, response.toXContent(channel.newBuilder(), ToXContent.EMPTY_PARAMS));
+            }
+        };
+    }
+}
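The handler above also accepts an explicit metric type through the `type` parameter; a sketch using the same low-level REST client as the tests earlier (latency is the only metric wired up in this patch):

    // Explicit metric type; an unknown type fails fast in prepareRequest with an
    // IllegalArgumentException before the transport action ever runs.
    Request typed = new Request("GET", "/_insights/top_queries?type=latency");
    Response typedResponse = client().performRequest(typed);
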
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/package-info.java
new file mode 100644
index 0000000000000..087cf7d765f8c
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Rest Handlers for Top N Queries
+ */
+package org.opensearch.plugin.insights.rules.resthandler.top_queries;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/package-info.java
new file mode 100644
index 0000000000000..f3a1c70b9af57
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Transport Actions for Query Insights.
+ */
+package org.opensearch.plugin.insights.rules.transport;
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java
new file mode 100644
index 0000000000000..ddf614211bc41
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesAction.java
@@ -0,0 +1,155 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.rules.transport.top_queries;
+
+import org.opensearch.OpenSearchException;
+import org.opensearch.action.FailedNodeException;
+import org.opensearch.action.support.ActionFilters;
+import org.opensearch.action.support.nodes.TransportNodesAction;
+import org.opensearch.cluster.service.ClusterService;
+import org.opensearch.common.inject.Inject;
+import org.opensearch.core.common.io.stream.StreamInput;
+import org.opensearch.core.common.io.stream.StreamOutput;
+import org.opensearch.plugin.insights.core.service.QueryInsightsService;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueries;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.threadpool.ThreadPool;
+import org.opensearch.transport.TransportRequest;
+import org.opensearch.transport.TransportService;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * Transport action for cluster/node level top queries information.
+ *
+ * @opensearch.internal
+ */
+public class TransportTopQueriesAction extends TransportNodesAction<
+    TopQueriesRequest,
+    TopQueriesResponse,
+    TransportTopQueriesAction.NodeRequest,
+    TopQueries> {
+
+    private final QueryInsightsService queryInsightsService;
+
+    /**
+     * Create the TransportTopQueriesAction Object
+     *
+     * @param threadPool The OpenSearch thread pool to run async tasks
+     * @param clusterService The clusterService of this node
+     * @param transportService The TransportService of this node
+     * @param queryInsightsService The queryInsightsService associated with this Transport Action
+     * @param actionFilters the action filters
+     */
+    @Inject
+    public TransportTopQueriesAction(
+        final ThreadPool threadPool,
+        final ClusterService clusterService,
+        final TransportService transportService,
+        final QueryInsightsService queryInsightsService,
+        final ActionFilters actionFilters
+    ) {
+        super(
+            TopQueriesAction.NAME,
+            threadPool,
+            clusterService,
+            transportService,
+            actionFilters,
+            TopQueriesRequest::new,
+            NodeRequest::new,
+            ThreadPool.Names.GENERIC,
+            TopQueries.class
+        );
+        this.queryInsightsService = queryInsightsService;
+    }
+
+    @Override
+    protected TopQueriesResponse newResponse(
+        final TopQueriesRequest topQueriesRequest,
+        final List<TopQueries> responses,
+        final List<FailedNodeException> failures
+    ) {
+        if (topQueriesRequest.getMetricType() == MetricType.LATENCY) {
+            return new TopQueriesResponse(
+                clusterService.getClusterName(),
+                responses,
+                failures,
+                clusterService.getClusterSettings().get(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE),
+                MetricType.LATENCY
+            );
+        } else {
+            throw new OpenSearchException(String.format(Locale.ROOT, "invalid metric type %s", topQueriesRequest.getMetricType()));
+        }
+    }
+
+    @Override
+    protected NodeRequest newNodeRequest(final TopQueriesRequest request) {
+        return new NodeRequest(request);
+    }
+
+    @Override
+    protected TopQueries newNodeResponse(final StreamInput in) throws IOException {
+        return new TopQueries(in);
+    }
+
+    @Override
+    protected TopQueries nodeOperation(final NodeRequest nodeRequest) {
+        final TopQueriesRequest request = nodeRequest.request;
+        if (request.getMetricType() == MetricType.LATENCY) {
+            return new TopQueries(
+                clusterService.localNode(),
+                queryInsightsService.getTopQueriesService(MetricType.LATENCY).getTopQueriesRecords(true)
+            );
+        } else {
+            throw new OpenSearchException(String.format(Locale.ROOT, "invalid metric type %s", request.getMetricType()));
+        }
+
+    }
+
+    /**
+     * Inner Node Top Queries Request
+     *
+     * @opensearch.internal
+     */
+    public static class NodeRequest extends TransportRequest {
+
+        final TopQueriesRequest request;
+
+        /**
+         * Create the NodeRequest object from StreamInput
+         *
+         * @param in the StreamInput to read the object
+         * @throws IOException IOException
+         */
+        public NodeRequest(StreamInput in) throws IOException {
+            super(in);
+            request = new TopQueriesRequest(in);
+        }
+
+        /**
+         * Create the NodeRequest object from a TopQueriesRequest
+         * @param request the TopQueriesRequest object
+         */
+        public NodeRequest(final TopQueriesRequest request) {
+            this.request = request;
+        }
+
+        @Override
+        public void writeTo(final StreamOutput out) throws IOException {
+            super.writeTo(out);
+            request.writeTo(out);
+        }
+    }
+}
diff --git a/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/package-info.java b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/package-info.java
new file mode 100644
index 0000000000000..54da0980deff8
--- /dev/null
+++ b/plugins/query-insights/src/main/java/org/opensearch/plugin/insights/rules/transport/top_queries/package-info.java
@@ -0,0 +1,12 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/**
+ * Transport Actions for Top N Queries.
+ */
+package org.opensearch.plugin.insights.rules.transport.top_queries;
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java
index f6d95450855de..273b69e483e8c 100644
--- a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsPluginTests.java
@@ -14,11 +14,16 @@
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.core.action.ActionResponse;
+import org.opensearch.plugin.insights.core.listener.QueryInsightsListener;
 import org.opensearch.plugin.insights.core.service.QueryInsightsService;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesAction;
+import org.opensearch.plugin.insights.rules.resthandler.top_queries.RestTopQueriesAction;
 import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
 import org.opensearch.plugins.ActionPlugin;
 import org.opensearch.rest.RestHandler;
 import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.threadpool.ExecutorBuilder;
+import org.opensearch.threadpool.ScalingExecutorBuilder;
 import org.opensearch.threadpool.ThreadPool;
 import org.junit.Before;
 
@@ -74,18 +79,29 @@ public void testCreateComponent() {
             null,
             null
         );
-        assertEquals(1, components.size());
+        assertEquals(2, components.size());
         assertTrue(components.get(0) instanceof QueryInsightsService);
+        assertTrue(components.get(1) instanceof QueryInsightsListener);
+    }
+
+    public void testGetExecutorBuilders() {
+        Settings.Builder settingsBuilder = Settings.builder();
+        Settings settings = settingsBuilder.build();
+        List<ExecutorBuilder<?>> executorBuilders = queryInsightsPlugin.getExecutorBuilders(settings);
+        assertEquals(1, executorBuilders.size());
+        assertTrue(executorBuilders.get(0) instanceof ScalingExecutorBuilder);
     }
 
     public void testGetRestHandlers() {
         List<RestHandler> components = queryInsightsPlugin.getRestHandlers(Settings.EMPTY, null, null, null, null, null, null);
-        assertEquals(0, components.size());
+        assertEquals(1, components.size());
+        assertTrue(components.get(0) instanceof RestTopQueriesAction);
     }
 
     public void testGetActions() {
         List<ActionPlugin.ActionHandler<? extends ActionRequest, ? extends ActionResponse>> components = queryInsightsPlugin.getActions();
-        assertEquals(0, components.size());
+        assertEquals(1, components.size());
+        assertTrue(components.get(0).getAction() instanceof TopQueriesAction);
     }
 }
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java
index 04f49d5fbc7dd..870ef5b9c8be9 100644
--- a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/QueryInsightsTestUtils.java
@@ -9,12 +9,15 @@
 package org.opensearch.plugin.insights;
 
 import org.opensearch.action.search.SearchType;
+import org.opensearch.cluster.node.DiscoveryNode;
 import org.opensearch.common.util.Maps;
 import org.opensearch.core.xcontent.ToXContent;
 import org.opensearch.core.xcontent.XContentBuilder;
+import org.opensearch.plugin.insights.rules.action.top_queries.TopQueries;
 import org.opensearch.plugin.insights.rules.model.Attribute;
 import org.opensearch.plugin.insights.rules.model.MetricType;
 import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;
+import org.opensearch.test.VersionUtils;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -26,7 +29,11 @@
 import java.util.Set;
 import java.util.TreeSet;
 
+import static java.util.Collections.emptyMap;
+import static java.util.Collections.emptySet;
 import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.opensearch.test.OpenSearchTestCase.buildNewFakeTransportAddress;
+import static org.opensearch.test.OpenSearchTestCase.random;
 import static org.opensearch.test.OpenSearchTestCase.randomAlphaOfLengthBetween;
 import static org.opensearch.test.OpenSearchTestCase.randomArray;
 import static org.opensearch.test.OpenSearchTestCase.randomDouble;
@@ -79,6 +86,33 @@ public static List<SearchQueryRecord> generateQueryInsightRecords(int lower, int
         return records;
     }
 
+    public static TopQueries createRandomTopQueries() {
+        DiscoveryNode node = new DiscoveryNode(
+            "node_for_top_queries_test",
+            buildNewFakeTransportAddress(),
+            emptyMap(),
+            emptySet(),
+            VersionUtils.randomVersion(random())
+        );
+        List<SearchQueryRecord> records = generateQueryInsightRecords(10);
+
+        return new TopQueries(node, records);
+    }
+
+    public static TopQueries createFixedTopQueries() {
+        DiscoveryNode node = new DiscoveryNode(
+            "node_for_top_queries_test",
+            buildNewFakeTransportAddress(),
+            emptyMap(),
+            emptySet(),
+            VersionUtils.randomVersion(random())
+        );
+        List<SearchQueryRecord> records = new ArrayList<>();
+        records.add(createFixedSearchQueryRecord());
+
+        return new TopQueries(node, records);
+    }
+
     public static SearchQueryRecord createFixedSearchQueryRecord() {
         long timestamp = 1706574180000L;
         Map<MetricType, Number> measurements = Map.of(MetricType.LATENCY, 1L);
diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java
new file mode 100644
index 0000000000000..f340950017a5c
--- /dev/null
+++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/core/listener/QueryInsightsListenerTests.java
@@ -0,0 +1,161 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.plugin.insights.core.listener;
+
+import org.opensearch.action.search.SearchPhaseContext;
+import org.opensearch.action.search.SearchRequest;
+import org.opensearch.action.search.SearchRequestContext;
+import org.opensearch.action.search.SearchType;
+import org.opensearch.cluster.service.ClusterService;
+import org.opensearch.common.settings.ClusterSettings;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.plugin.insights.core.service.QueryInsightsService;
+import org.opensearch.plugin.insights.core.service.TopQueriesService;
+import org.opensearch.plugin.insights.rules.model.MetricType;
+import org.opensearch.plugin.insights.settings.QueryInsightsSettings;
+import org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.opensearch.search.aggregations.support.ValueType;
+import org.opensearch.search.builder.SearchSourceBuilder;
+import org.opensearch.test.OpenSearchTestCase;
+import org.junit.Before;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.Phaser;
+
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
+
+/**
+ * Unit Tests for {@link QueryInsightsListener}.
+ */
+public class QueryInsightsListenerTests extends OpenSearchTestCase {
+    private final SearchRequestContext searchRequestContext = mock(SearchRequestContext.class);
+    private final SearchPhaseContext searchPhaseContext = mock(SearchPhaseContext.class);
+    private final SearchRequest searchRequest = mock(SearchRequest.class);
+    private final QueryInsightsService queryInsightsService = mock(QueryInsightsService.class);
+    private final TopQueriesService topQueriesService = mock(TopQueriesService.class);
+    private ClusterService clusterService;
+
+    @Before
+    public void setup() {
+        Settings.Builder settingsBuilder = Settings.builder();
+        Settings settings = settingsBuilder.build();
+        ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
+        clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED);
+        clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE);
+        clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE);
+        clusterService = new ClusterService(settings, clusterSettings, null);
+        when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(true);
+        when(queryInsightsService.getTopQueriesService(MetricType.LATENCY)).thenReturn(topQueriesService);
+    }
+
+    public void testOnRequestEnd() throws InterruptedException {
+        Long timestamp = System.currentTimeMillis() - 100L;
+        SearchType searchType = SearchType.QUERY_THEN_FETCH;
+
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
+        searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1").userValueTypeHint(ValueType.STRING).field("type.keyword"));
+        searchSourceBuilder.size(0);
+
+        String[] indices = new String[] { "index-1", "index-2" };
+
+        Map<String, Long> phaseLatencyMap = new HashMap<>();
+        phaseLatencyMap.put("expand", 0L);
+        phaseLatencyMap.put("query", 20L);
+        phaseLatencyMap.put("fetch", 1L);
+
+        int numberOfShards = 10;
+
+        QueryInsightsListener
queryInsightsListener = new QueryInsightsListener(clusterService, queryInsightsService); + + when(searchRequest.getOrCreateAbsoluteStartMillis()).thenReturn(timestamp); + when(searchRequest.searchType()).thenReturn(searchType); + when(searchRequest.source()).thenReturn(searchSourceBuilder); + when(searchRequest.indices()).thenReturn(indices); + when(searchRequestContext.phaseTookMap()).thenReturn(phaseLatencyMap); + when(searchPhaseContext.getRequest()).thenReturn(searchRequest); + when(searchPhaseContext.getNumShards()).thenReturn(numberOfShards); + + queryInsightsListener.onRequestEnd(searchPhaseContext, searchRequestContext); + + verify(queryInsightsService, times(1)).addRecord(any()); + } + + public void testConcurrentOnRequestEnd() throws InterruptedException { + Long timestamp = System.currentTimeMillis() - 100L; + SearchType searchType = SearchType.QUERY_THEN_FETCH; + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.aggregation(new TermsAggregationBuilder("agg1").userValueTypeHint(ValueType.STRING).field("type.keyword")); + searchSourceBuilder.size(0); + + String[] indices = new String[] { "index-1", "index-2" }; + + Map<String, Long> phaseLatencyMap = new HashMap<>(); + phaseLatencyMap.put("expand", 0L); + phaseLatencyMap.put("query", 20L); + phaseLatencyMap.put("fetch", 1L); + + int numberOfShards = 10; + + final List<QueryInsightsListener> searchListenersList = new ArrayList<>(); + + when(searchRequest.getOrCreateAbsoluteStartMillis()).thenReturn(timestamp); + when(searchRequest.searchType()).thenReturn(searchType); + when(searchRequest.source()).thenReturn(searchSourceBuilder); + when(searchRequest.indices()).thenReturn(indices); + when(searchRequestContext.phaseTookMap()).thenReturn(phaseLatencyMap); + when(searchPhaseContext.getRequest()).thenReturn(searchRequest); + when(searchPhaseContext.getNumShards()).thenReturn(numberOfShards); + + int numRequests = 50; + Thread[] threads = new Thread[numRequests]; + Phaser phaser = new Phaser(numRequests + 1); + CountDownLatch countDownLatch = new CountDownLatch(numRequests); + + for (int i = 0; i < numRequests; i++) { + searchListenersList.add(new QueryInsightsListener(clusterService, queryInsightsService)); + } + + for (int i = 0; i < numRequests; i++) { + int finalI = i; + threads[i] = new Thread(() -> { + phaser.arriveAndAwaitAdvance(); + QueryInsightsListener thisListener = searchListenersList.get(finalI); + thisListener.onRequestEnd(searchPhaseContext, searchRequestContext); + countDownLatch.countDown(); + }); + threads[i].start(); + } + phaser.arriveAndAwaitAdvance(); + countDownLatch.await(); + + verify(queryInsightsService, times(numRequests)).addRecord(any()); + } + + public void testSetEnabled() { + when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(true); + QueryInsightsListener queryInsightsListener = new QueryInsightsListener(clusterService, queryInsightsService); + queryInsightsListener.setEnableTopQueries(MetricType.LATENCY, true); + assertTrue(queryInsightsListener.isEnabled()); + + when(queryInsightsService.isCollectionEnabled(MetricType.LATENCY)).thenReturn(false); + when(queryInsightsService.isCollectionEnabled(MetricType.CPU)).thenReturn(false); + when(queryInsightsService.isCollectionEnabled(MetricType.JVM)).thenReturn(false); + queryInsightsListener.setEnableTopQueries(MetricType.LATENCY, false); + assertFalse(queryInsightsListener.isEnabled()); + } +} diff --git 
a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequestTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequestTests.java new file mode 100644 index 0000000000000..619fd4b33a3dc --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesRequestTests.java @@ -0,0 +1,43 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.test.OpenSearchTestCase; + +/** + * Granular tests for the {@link TopQueriesRequest} class. + */ +public class TopQueriesRequestTests extends OpenSearchTestCase { + + /** + * Check that we can set the metric type + */ + public void testSetMetricType() throws Exception { + TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY, randomAlphaOfLength(5)); + TopQueriesRequest deserializedRequest = roundTripRequest(request); + assertEquals(request.getMetricType(), deserializedRequest.getMetricType()); + } + + /** + * Serialize and deserialize a request. + * @param request A request to serialize. + * @return The deserialized, "round-tripped" request. + */ + private static TopQueriesRequest roundTripRequest(TopQueriesRequest request) throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + request.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + return new TopQueriesRequest(in); + } + } + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponseTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponseTests.java new file mode 100644 index 0000000000000..eeee50d3da703 --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesResponseTests.java @@ -0,0 +1,71 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.cluster.ClusterName; +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.core.xcontent.MediaTypeRegistry; +import org.opensearch.core.xcontent.ToXContent; +import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.plugin.insights.QueryInsightsTestUtils; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Granular tests for the {@link TopQueriesResponse} class. 
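+ * Covers stream serialization round-trips and the XContent rendering of the response.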
+ */ +public class TopQueriesResponseTests extends OpenSearchTestCase { + + /** + * Check serialization and deserialization + */ + public void testSerialize() throws Exception { + TopQueries topQueries = QueryInsightsTestUtils.createRandomTopQueries(); + ClusterName clusterName = new ClusterName("test-cluster"); + TopQueriesResponse response = new TopQueriesResponse(clusterName, List.of(topQueries), new ArrayList<>(), 10, MetricType.LATENCY); + TopQueriesResponse deserializedResponse = roundTripResponse(response); + assertEquals(response.toString(), deserializedResponse.toString()); + } + + public void testToXContent() throws IOException { + char[] expectedXcontent = + "{\"top_queries\":[{\"timestamp\":1706574180000,\"node_id\":\"node_for_top_queries_test\",\"search_type\":\"query_then_fetch\",\"latency\":1}]}" + .toCharArray(); + TopQueries topQueries = QueryInsightsTestUtils.createFixedTopQueries(); + ClusterName clusterName = new ClusterName("test-cluster"); + TopQueriesResponse response = new TopQueriesResponse(clusterName, List.of(topQueries), new ArrayList<>(), 10, MetricType.LATENCY); + XContentBuilder builder = MediaTypeRegistry.contentBuilder(MediaTypeRegistry.JSON); + char[] xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString().toCharArray(); + Arrays.sort(expectedXcontent); + Arrays.sort(xContent); + + assertEquals(Arrays.hashCode(expectedXcontent), Arrays.hashCode(xContent)); + } + + /** + * Serialize and deserialize a TopQueriesResponse. + * @param response A response to serialize. + * @return The deserialized, "round-tripped" response. + */ + private static TopQueriesResponse roundTripResponse(TopQueriesResponse response) throws Exception { + try (BytesStreamOutput out = new BytesStreamOutput()) { + response.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + return new TopQueriesResponse(in); + } + } + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesTests.java new file mode 100644 index 0000000000000..7db08b53ad1df --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/action/top_queries/TopQueriesTests.java @@ -0,0 +1,35 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.action.top_queries; + +import org.opensearch.common.io.stream.BytesStreamOutput; +import org.opensearch.core.common.io.stream.StreamInput; +import org.opensearch.plugin.insights.QueryInsightsTestUtils; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; + +/** + * Tests for {@link TopQueries}. 
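+ * Verifies wire serialization round-trips of the per-node {@link TopQueries} records.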
+ */ +public class TopQueriesTests extends OpenSearchTestCase { + + public void testTopQueries() throws IOException { + TopQueries topQueries = QueryInsightsTestUtils.createRandomTopQueries(); + try (BytesStreamOutput out = new BytesStreamOutput()) { + topQueries.writeTo(out); + try (StreamInput in = out.bytes().streamInput()) { + TopQueries readTopQueries = new TopQueries(in); + assertTrue( + QueryInsightsTestUtils.checkRecordsEquals(topQueries.getTopQueriesRecord(), readTopQueries.getTopQueriesRecord()) + ); + } + } + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesActionTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesActionTests.java new file mode 100644 index 0000000000000..ac19fa2a7348f --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/resthandler/top_queries/RestTopQueriesActionTests.java @@ -0,0 +1,70 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.resthandler.top_queries; + +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest; +import org.opensearch.rest.RestHandler; +import org.opensearch.rest.RestRequest; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.test.rest.FakeRestRequest; + +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +import static org.opensearch.plugin.insights.rules.resthandler.top_queries.RestTopQueriesAction.ALLOWED_METRICS; + +public class RestTopQueriesActionTests extends OpenSearchTestCase { + + public void testEmptyNodeIdsValidType() { + Map<String, String> params = new HashMap<>(); + params.put("type", randomFrom(ALLOWED_METRICS)); + RestRequest restRequest = buildRestRequest(params); + TopQueriesRequest actual = RestTopQueriesAction.prepareRequest(restRequest); + assertEquals(0, actual.nodesIds().length); + } + + public void testNodeIdsValid() { + Map<String, String> params = new HashMap<>(); + params.put("type", randomFrom(ALLOWED_METRICS)); + String[] nodes = randomArray(1, 10, String[]::new, () -> randomAlphaOfLengthBetween(5, 10)); + params.put("nodeId", String.join(",", nodes)); + + RestRequest restRequest = buildRestRequest(params); + TopQueriesRequest actual = RestTopQueriesAction.prepareRequest(restRequest); + assertArrayEquals(nodes, actual.nodesIds()); + } + + public void testInValidType() { + Map<String, String> params = new HashMap<>(); + params.put("type", randomAlphaOfLengthBetween(5, 10).toUpperCase(Locale.ROOT)); + + RestRequest restRequest = buildRestRequest(params); + Exception exception = assertThrows(IllegalArgumentException.class, () -> { RestTopQueriesAction.prepareRequest(restRequest); }); + assertEquals( + String.format(Locale.ROOT, "request [/_insights/top_queries] contains invalid metric type [%s]", params.get("type")), + exception.getMessage() + ); + } + + public void testGetRoutes() { + RestTopQueriesAction action = new RestTopQueriesAction(); + List<RestHandler.Route> routes = action.routes(); + assertEquals(2, routes.size()); + assertEquals("query_insights_top_queries_action", action.getName()); + } + + private FakeRestRequest buildRestRequest(Map<String, String> params) { + return new 
FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET) + .withPath("/_insights/top_queries") + .withParams(params) + .build(); + } +} diff --git a/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesActionTests.java b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesActionTests.java new file mode 100644 index 0000000000000..a5f36b6e8cce0 --- /dev/null +++ b/plugins/query-insights/src/test/java/org/opensearch/plugin/insights/rules/transport/top_queries/TransportTopQueriesActionTests.java @@ -0,0 +1,84 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.plugin.insights.rules.transport.top_queries; + +import org.opensearch.action.support.ActionFilters; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.plugin.insights.core.service.QueryInsightsService; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesRequest; +import org.opensearch.plugin.insights.rules.action.top_queries.TopQueriesResponse; +import org.opensearch.plugin.insights.rules.model.MetricType; +import org.opensearch.plugin.insights.settings.QueryInsightsSettings; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; +import org.junit.Before; + +import java.util.List; + +import static org.mockito.Mockito.mock; + +public class TransportTopQueriesActionTests extends OpenSearchTestCase { + + private final ThreadPool threadPool = mock(ThreadPool.class); + + private final Settings.Builder settingsBuilder = Settings.builder(); + private final Settings settings = settingsBuilder.build(); + private final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + private final ClusterService clusterService = new ClusterService(settings, clusterSettings, threadPool); + private final TransportService transportService = mock(TransportService.class); + private final QueryInsightsService topQueriesByLatencyService = mock(QueryInsightsService.class); + private final ActionFilters actionFilters = mock(ActionFilters.class); + private final TransportTopQueriesAction transportTopQueriesAction = new TransportTopQueriesAction( + threadPool, + clusterService, + transportService, + topQueriesByLatencyService, + actionFilters + ); + private final DummyParentAction dummyParentAction = new DummyParentAction( + threadPool, + clusterService, + transportService, + topQueriesByLatencyService, + actionFilters + ); + + class DummyParentAction extends TransportTopQueriesAction { + public DummyParentAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + QueryInsightsService topQueriesByLatencyService, + ActionFilters actionFilters + ) { + super(threadPool, clusterService, transportService, topQueriesByLatencyService, actionFilters); + } + + public TopQueriesResponse createNewResponse() { + TopQueriesRequest request = new TopQueriesRequest(MetricType.LATENCY); + return newResponse(request, List.of(), List.of()); + } + } + + @Before + public void setup() { + 
clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_ENABLED); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_SIZE); + clusterSettings.registerSetting(QueryInsightsSettings.TOP_N_LATENCY_QUERIES_WINDOW_SIZE); + } + + public void testNewResponse() { + TopQueriesResponse response = dummyParentAction.createNewResponse(); + assertNotNull(response); + } + +} From cf3bbb8996923c78326bdba0c61444b510ff6d31 Mon Sep 17 00:00:00 2001 From: Mohit Godwani <81609427+mgodwan@users.noreply.github.com> Date: Tue, 6 Feb 2024 18:48:41 +0530 Subject: [PATCH 176/178] Disable seg-rep for fuzzy filter postings upgrade IT (#12183) Signed-off-by: mgodwan <mgodwan@amazon.com> --- .../src/test/java/org/opensearch/upgrades/IndexingIT.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index 08a5e92fc2d02..8e8734b5d62b3 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -360,7 +360,6 @@ public void testIndexingWithFuzzyFilterPostings() throws Exception { Settings.Builder settings = Settings.builder() .put(IndexMetadata.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), shardCount) .put(IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), replicaCount) - .put(IndexMetadata.SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT) .put( EngineConfig.INDEX_CODEC_SETTING.getKey(), randomFrom(new ArrayList<>(CODECS) { @@ -406,7 +405,6 @@ public void testIndexingWithFuzzyFilterPostings() throws Exception { throw new UnsupportedOperationException("Unknown cluster type [" + CLUSTER_TYPE + "]"); } - waitForSearchableDocs(indexName, shardCount, replicaCount); assertCount(indexName, expectedCount); if (CLUSTER_TYPE != ClusterType.OLD) { @@ -416,14 +414,12 @@ public void testIndexingWithFuzzyFilterPostings() throws Exception { toBeDeleted.addParameter("refresh", "true"); toBeDeleted.setJsonEntity("{\"f1\": \"delete-me\"}"); client().performRequest(toBeDeleted); - waitForSearchableDocs(indexName, shardCount, replicaCount); assertCount(indexName, expectedCount + 6); logger.info("--> Delete previously added doc and verify doc count"); Request delete = new Request("DELETE", "/" + indexName + "/_doc/to_be_deleted"); delete.addParameter("refresh", "true"); client().performRequest(delete); - waitForSearchableDocs(indexName, shardCount, replicaCount); assertCount(indexName, expectedCount + 5); //forceMergeAndVerify(indexName, shardCount * (1 + replicaCount)); From 8d54278cc5e2b9b73c0825cc430747f03ed96349 Mon Sep 17 00:00:00 2001 From: gaobinlong <gbinlong@amazon.com> Date: Wed, 7 Feb 2024 01:35:05 +0800 Subject: [PATCH 177/178] Add remove_by_pattern ingest processor (#11920) * Add remove_by_pattern ingest processor * Modify change log * Remove some duplicated checks * Add more yml test case * Fix typo --------- Signed-off-by: Gao Binlong <gbinlong@amazon.com> --- CHANGELOG.md | 1 + .../common/IngestCommonModulePlugin.java | 1 + .../common/RemoveByPatternProcessor.java | 180 ++++++++++++++++++ .../RemoveByPatternProcessorFactoryTests.java | 114 +++++++++++ .../common/RemoveByPatternProcessorTests.java | 96 ++++++++++ .../rest-api-spec/test/ingest/10_basic.yml | 17 ++ .../310_remove_by_pattern_processor.yml | 146 ++++++++++++++ 7 files changed, 555 insertions(+) create mode 100644 
modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java create mode 100644 modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java create mode 100644 modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java create mode 100644 modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b78222aa742f..a6a78e0c19e98 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -132,6 +132,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Add copy ingest processor ([#11870](https://github.com/opensearch-project/OpenSearch/pull/11870)) - Introduce new feature flag "WRITEABLE_REMOTE_INDEX" to gate the writeable remote index functionality ([#11717](https://github.com/opensearch-project/OpenSearch/pull/11170)) - Bump OpenTelemetry from 1.32.0 to 1.34.1 ([#11891](https://github.com/opensearch-project/OpenSearch/pull/11891)) +- Add remove_by_pattern ingest processor ([#11920](https://github.com/opensearch-project/OpenSearch/pull/11920)) - Support index level allocation filtering for searchable snapshot index ([#11563](https://github.com/opensearch-project/OpenSearch/pull/11563)) - Add `org.opensearch.rest.MethodHandlers` and `RestController#getAllHandlers` ([11876](https://github.com/opensearch-project/OpenSearch/pull/11876)) - New DateTime format for RFC3339 compatible date fields ([#11465](https://github.com/opensearch-project/OpenSearch/pull/11465)) diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java index 7c1b4841122b0..ff6a322ede38f 100644 --- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/IngestCommonModulePlugin.java @@ -107,6 +107,7 @@ public Map<String, Processor.Factory> getProcessors(Processor.Parameters paramet processors.put(HtmlStripProcessor.TYPE, new HtmlStripProcessor.Factory()); processors.put(CsvProcessor.TYPE, new CsvProcessor.Factory()); processors.put(CopyProcessor.TYPE, new CopyProcessor.Factory(parameters.scriptService)); + processors.put(RemoveByPatternProcessor.TYPE, new RemoveByPatternProcessor.Factory()); return Collections.unmodifiableMap(processors); } diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java new file mode 100644 index 0000000000000..da87f5201db72 --- /dev/null +++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/RemoveByPatternProcessor.java @@ -0,0 +1,180 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.ingest.common;
+
+import org.opensearch.common.Nullable;
+import org.opensearch.common.ValidationException;
+import org.opensearch.common.regex.Regex;
+import org.opensearch.core.common.Strings;
+import org.opensearch.ingest.AbstractProcessor;
+import org.opensearch.ingest.ConfigurationUtils;
+import org.opensearch.ingest.IngestDocument;
+import org.opensearch.ingest.Processor;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import static org.opensearch.ingest.ConfigurationUtils.newConfigurationException;
+
+/**
+ * Processor that removes existing fields matching the given field patterns, or all fields except those matching the exclude field patterns.
+ */
+public final class RemoveByPatternProcessor extends AbstractProcessor {
+
+    public static final String TYPE = "remove_by_pattern";
+    private final List<String> fieldPatterns;
+    private final List<String> excludeFieldPatterns;
+
+    RemoveByPatternProcessor(
+        String tag,
+        String description,
+        @Nullable List<String> fieldPatterns,
+        @Nullable List<String> excludeFieldPatterns
+    ) {
+        super(tag, description);
+        if ((fieldPatterns != null && excludeFieldPatterns != null) || (fieldPatterns == null && excludeFieldPatterns == null)) {
+            throw new IllegalArgumentException("either fieldPatterns or excludeFieldPatterns must be set");
+        }
+        if (fieldPatterns == null) {
+            this.fieldPatterns = null;
+            this.excludeFieldPatterns = new ArrayList<>(excludeFieldPatterns);
+        } else {
+            this.fieldPatterns = new ArrayList<>(fieldPatterns);
+            this.excludeFieldPatterns = null;
+        }
+    }
+
+    public List<String> getFieldPatterns() {
+        return fieldPatterns;
+    }
+
+    public List<String> getExcludeFieldPatterns() {
+        return excludeFieldPatterns;
+    }
+
+    @Override
+    public IngestDocument execute(IngestDocument document) {
+        Set<String> existingFields = new HashSet<>(document.getSourceAndMetadata().keySet());
+        Set<String> metadataFields = document.getMetadata()
+            .keySet()
+            .stream()
+            .map(IngestDocument.Metadata::getFieldName)
+            .collect(Collectors.toSet());
+
+        if (fieldPatterns != null && !fieldPatterns.isEmpty()) {
+            existingFields.forEach(field -> {
+                // ignore metadata fields such as _index, _id, etc.
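+                // patterns are simple wildcard expressions evaluated via Regex.simpleMatch, not full regexes,
+                // and reserved metadata keys are never candidates for removal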
+ if (!metadataFields.contains(field)) { + final boolean matched = excludeFieldPatterns.stream().anyMatch(pattern -> Regex.simpleMatch(pattern, field)); + if (!matched) { + document.removeField(field); + } + } + }); + } + + return document; + } + + @Override + public String getType() { + return TYPE; + } + + public static final class Factory implements Processor.Factory { + + public Factory() {} + + @Override + public RemoveByPatternProcessor create( + Map<String, Processor.Factory> registry, + String processorTag, + String description, + Map<String, Object> config + ) throws Exception { + final List<String> fieldPatterns = new ArrayList<>(); + final List<String> excludeFieldPatterns = new ArrayList<>(); + final Object fieldPattern = ConfigurationUtils.readOptionalObject(config, "field_pattern"); + final Object excludeFieldPattern = ConfigurationUtils.readOptionalObject(config, "exclude_field_pattern"); + + if (fieldPattern == null && excludeFieldPattern == null || fieldPattern != null && excludeFieldPattern != null) { + throw newConfigurationException( + TYPE, + processorTag, + "field_pattern", + "either field_pattern or exclude_field_pattern must be set" + ); + } + + if (fieldPattern != null) { + if (fieldPattern instanceof List) { + @SuppressWarnings("unchecked") + List<String> fieldPatternList = (List<String>) fieldPattern; + fieldPatterns.addAll(fieldPatternList); + } else { + fieldPatterns.add((String) fieldPattern); + } + validateFieldPatterns(processorTag, fieldPatterns, "field_pattern"); + return new RemoveByPatternProcessor(processorTag, description, fieldPatterns, null); + } else { + if (excludeFieldPattern instanceof List) { + @SuppressWarnings("unchecked") + List<String> excludeFieldPatternList = (List<String>) excludeFieldPattern; + excludeFieldPatterns.addAll(excludeFieldPatternList); + } else { + excludeFieldPatterns.add((String) excludeFieldPattern); + } + validateFieldPatterns(processorTag, excludeFieldPatterns, "exclude_field_pattern"); + return new RemoveByPatternProcessor(processorTag, description, null, excludeFieldPatterns); + } + } + + private void validateFieldPatterns(String processorTag, List<String> patterns, String patternKey) { + List<String> validationErrors = new ArrayList<>(); + for (String fieldPattern : patterns) { + if (fieldPattern.contains("#")) { + validationErrors.add(patternKey + " [" + fieldPattern + "] must not contain a '#'"); + } + if (fieldPattern.contains(":")) { + validationErrors.add(patternKey + " [" + fieldPattern + "] must not contain a ':'"); + } + if (fieldPattern.startsWith("_")) { + validationErrors.add(patternKey + " [" + fieldPattern + "] must not start with '_'"); + } + if (Strings.validFileNameExcludingAstrix(fieldPattern) == false) { + validationErrors.add( + patternKey + " [" + fieldPattern + "] must not contain the following characters " + Strings.INVALID_FILENAME_CHARS + ); + } + } + + if (validationErrors.size() > 0) { + ValidationException validationException = new ValidationException(); + validationException.addValidationErrors(validationErrors); + throw newConfigurationException(TYPE, processorTag, patternKey, validationException.getMessage()); + } + } + } +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java new file mode 100644 index 0000000000000..09ba97ebb4595 --- /dev/null +++ 
b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorFactoryTests.java @@ -0,0 +1,114 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.ingest.common; + +import org.opensearch.OpenSearchException; +import org.opensearch.OpenSearchParseException; +import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.CoreMatchers.equalTo; + +public class RemoveByPatternProcessorFactoryTests extends OpenSearchTestCase { + + private RemoveByPatternProcessor.Factory factory; + + @Before + public void init() { + factory = new RemoveByPatternProcessor.Factory(); + } + + public void testCreateFieldPatterns() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("field_pattern", "field1*"); + String processorTag = randomAlphaOfLength(10); + RemoveByPatternProcessor removeByPatternProcessor = factory.create(null, processorTag, null, config); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getFieldPatterns().get(0), equalTo("field1*")); + + Map<String, Object> config2 = new HashMap<>(); + config2.put("field_pattern", List.of("field1*", "field2*")); + removeByPatternProcessor = factory.create(null, processorTag, null, config2); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getFieldPatterns().get(0), equalTo("field1*")); + assertThat(removeByPatternProcessor.getFieldPatterns().get(1), equalTo("field2*")); + + Map<String, Object> config3 = new HashMap<>(); + List<String> patterns = Arrays.asList("foo*", "*", " ", ",", "#", ":", "_"); + config3.put("field_pattern", patterns); + Exception exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config3)); + assertThat( + exception.getMessage(), + equalTo( + "[field_pattern] Validation Failed: " + + "1: field_pattern [ ] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "2: field_pattern [,] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "3: field_pattern [#] must not contain a '#';" + + "4: field_pattern [:] must not contain a ':';" + + "5: field_pattern [_] must not start with '_';" + ) + ); + } + + public void testCreateExcludeFieldPatterns() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("exclude_field_pattern", "field1*"); + String processorTag = randomAlphaOfLength(10); + RemoveByPatternProcessor removeByPatternProcessor = factory.create(null, processorTag, null, config); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(0), equalTo("field1*")); + + Map<String, Object> config2 = new HashMap<>(); + config2.put("exclude_field_pattern", List.of("field1*", "field2*")); + removeByPatternProcessor = factory.create(null, processorTag, null, config2); + assertThat(removeByPatternProcessor.getTag(), equalTo(processorTag)); + assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(0), equalTo("field1*")); + assertThat(removeByPatternProcessor.getExcludeFieldPatterns().get(1), equalTo("field2*")); + + Map<String, 
Object> config3 = new HashMap<>(); + List<String> patterns = Arrays.asList("foo*", "*", " ", ",", "#", ":", "_"); + config3.put("exclude_field_pattern", patterns); + Exception exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config3)); + assertThat( + exception.getMessage(), + equalTo( + "[exclude_field_pattern] Validation Failed: " + + "1: exclude_field_pattern [ ] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "2: exclude_field_pattern [,] must not contain the following characters [ , \", *, \\, <, |, ,, >, /, ?];" + + "3: exclude_field_pattern [#] must not contain a '#';" + + "4: exclude_field_pattern [:] must not contain a ':';" + + "5: exclude_field_pattern [_] must not start with '_';" + ) + ); + } + + public void testCreatePatternsFailed() throws Exception { + Map<String, Object> config = new HashMap<>(); + config.put("field_pattern", List.of("foo*")); + config.put("exclude_field_pattern", List.of("bar*")); + String processorTag = randomAlphaOfLength(10); + OpenSearchException exception = expectThrows( + OpenSearchParseException.class, + () -> factory.create(null, processorTag, null, config) + ); + assertThat(exception.getMessage(), equalTo("[field_pattern] either field_pattern or exclude_field_pattern must be set")); + + Map<String, Object> config2 = new HashMap<>(); + config2.put("field_pattern", null); + config2.put("exclude_field_pattern", null); + + exception = expectThrows(OpenSearchParseException.class, () -> factory.create(null, processorTag, null, config2)); + assertThat(exception.getMessage(), equalTo("[field_pattern] either field_pattern or exclude_field_pattern must be set")); + } +} diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java new file mode 100644 index 0000000000000..82ff93de1f44e --- /dev/null +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/RemoveByPatternProcessorTests.java @@ -0,0 +1,96 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.ingest.common; + +import org.opensearch.ingest.IngestDocument; +import org.opensearch.ingest.Processor; +import org.opensearch.ingest.RandomDocumentPicks; +import org.opensearch.test.OpenSearchTestCase; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class RemoveByPatternProcessorTests extends OpenSearchTestCase { + + public void testRemoveWithFieldPatterns() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + ingestDocument.setFieldValue("foo_1", "value"); + ingestDocument.setFieldValue("foo_2", "value"); + ingestDocument.setFieldValue("bar_1", "value"); + ingestDocument.setFieldValue("bar_2", "value"); + List<String> fieldPatterns = new ArrayList<>(); + fieldPatterns.add("foo*"); + fieldPatterns.add("_index*"); + fieldPatterns.add("_id*"); + fieldPatterns.add("_version*"); + Processor processor = new RemoveByPatternProcessor(randomAlphaOfLength(10), null, fieldPatterns, null); + processor.execute(ingestDocument); + assertThat(ingestDocument.hasField("foo_1"), equalTo(false)); + assertThat(ingestDocument.hasField("foo_2"), equalTo(false)); + assertThat(ingestDocument.hasField("bar_1"), equalTo(true)); + assertThat(ingestDocument.hasField("bar_2"), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.INDEX.getFieldName()), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION.getFieldName()), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName()), equalTo(true)); + } + + public void testRemoveWithExcludeFieldPatterns() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + ingestDocument.setFieldValue("foo_1", "value"); + ingestDocument.setFieldValue("foo_2", "value"); + ingestDocument.setFieldValue("foo_3", "value"); + List<String> excludeFieldPatterns = new ArrayList<>(); + excludeFieldPatterns.add("foo_3*"); + Processor processorWithExcludeFieldsAndPatterns = new RemoveByPatternProcessor( + randomAlphaOfLength(10), + null, + null, + excludeFieldPatterns + ); + processorWithExcludeFieldsAndPatterns.execute(ingestDocument); + assertThat(ingestDocument.hasField("foo_1"), equalTo(false)); + assertThat(ingestDocument.hasField("foo_2"), equalTo(false)); + assertThat(ingestDocument.hasField("foo_3"), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.INDEX.getFieldName()), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.ID.getFieldName()), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION.getFieldName()), equalTo(true)); + assertThat(ingestDocument.hasField(IngestDocument.Metadata.VERSION_TYPE.getFieldName()), equalTo(true)); + } + + public void testCreateRemoveByPatternProcessorWithBothFieldsAndExcludeFields() throws Exception { + assertThrows( + "either fieldPatterns and excludeFieldPatterns must be set", + IllegalArgumentException.class, + () -> new RemoveByPatternProcessor(randomAlphaOfLength(10), null, null, null) + ); + + final List<String> fieldPatterns; + if (randomBoolean()) { + fieldPatterns = new ArrayList<>(); + } else { + fieldPatterns = List.of("foo_1*"); + } + + final List<String> excludeFieldPatterns; + if (randomBoolean()) { + excludeFieldPatterns = new ArrayList<>(); + } else { + 
excludeFieldPatterns = List.of("foo_2*"); + } + + assertThrows( + "either fieldPatterns and excludeFieldPatterns must be set", + IllegalArgumentException.class, + () -> new RemoveByPatternProcessor(randomAlphaOfLength(10), null, fieldPatterns, excludeFieldPatterns) + ); + } +} diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml index 0719082c887f2..6717b3e0ebd99 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/10_basic.yml @@ -53,3 +53,20 @@ nodes.info: {} - contains: { nodes.$cluster_manager.ingest.processors: { type: copy } } + +--- +"Remove_by_pattern processor exists": + - skip: + version: " - 2.11.99" + features: contains + reason: "remove_by_pattern processor was introduced in 2.12.0 and contains is a newly added assertion" + - do: + cluster.state: {} + + # Get cluster-manager node id + - set: { cluster_manager_node: cluster_manager } + + - do: + nodes.info: {} + + - contains: { nodes.$cluster_manager.ingest.processors: { type: remove_by_pattern } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml new file mode 100644 index 0000000000000..397eb8f7b6033 --- /dev/null +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/310_remove_by_pattern_processor.yml @@ -0,0 +1,146 @@ +--- +teardown: + - do: + ingest.delete_pipeline: + id: "my_pipeline" + ignore: 404 + +--- +"Test creating remove_by_pattern processor failed": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + catch: /\[field\_pattern\] either field\_pattern or exclude\_field\_pattern must be set/ + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "field_pattern" : "foo*", + "exclude_field_pattern" : "bar*" + } + } + ] + } + + - do: + catch: /\[field\_pattern\] either field\_pattern or exclude\_field\_pattern must be set/ + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + } + } + ] + } + +--- +"Test remove_by_pattern processor with field_pattern": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "field_pattern" : ["foo*", "*a*b"] + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: { + foo1: "bar", + foo2: "bar", + zoo: "bar", + ab: "bar", + aabb: "bar" + } + + - do: + get: + index: test + id: 1 + - match: { _source: {zoo: "bar" }} + +--- +"Test remove_by_pattern processor with exclude_field_pattern": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "exclude_field_pattern": ["foo*", "a*b*"] + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: 1 + pipeline: "my_pipeline" + body: { + foo1: "bar", + foo2: "bar", + bar: 
"zoo", + zoo: "bar", + ab: "bar", + aabb: "bar" + } + + - do: + get: + index: test + id: 1 + - match: { _source: { foo1: "bar", foo2: "bar", ab: "bar", aabb: "bar"}} + + +--- +"Test cannot remove metadata fields by remove_by_pattern processor": + - skip: + version: " - 2.11.99" + reason: "introduced in 2.12.0" + - do: + catch: /field\_pattern \[\_id\] must not start with \'\_\'\;/ + ingest.put_pipeline: + id: "my_pipeline" + body: > + { + "description": "_description", + "processors": [ + { + "remove_by_pattern" : { + "field_pattern": "_id" + } + } + ] + } From 52b27f47bca5b3ab52cab237542f32c307d203b4 Mon Sep 17 00:00:00 2001 From: Dharin Shah <Dharin-shah@users.noreply.github.com> Date: Tue, 6 Feb 2024 19:01:54 +0100 Subject: [PATCH 178/178] add support for scored named queries (#11626) Opensearch already support labelling the queries, that returns as a list in the returned results, of which query it matched. However one of the use case while doing hybrid search with query text and dense vector is to determine individual scores for each query type. This is very useful in further analysis and building offline model to generate better weights for ranking score. Hence adding this feature that sends the client to add the score for each matched query. --------- Signed-off-by: Dharin Shah <8616130+Dharin-shah@users.noreply.github.com> Signed-off-by: Dharin Shah <Dharin-shah@users.noreply.github.com> Co-authored-by: Dharin Shah <8616130+Dharin-shah@users.noreply.github.com> --- CHANGELOG.md | 1 + .../resources/rest-api-spec/api/search.json | 5 + .../test/search/350_matched_queries.yml | 103 +++++++++++ .../recovery/IndexPrimaryRelocationIT.java | 21 +-- .../fetch/subphase/MatchedQueriesIT.java | 105 +++++++---- .../search/functionscore/QueryRescorerIT.java | 8 +- .../action/search/SearchRequestBuilder.java | 9 + .../rest/action/search/RestSearchAction.java | 10 +- .../search/DefaultSearchContext.java | 13 ++ .../java/org/opensearch/search/SearchHit.java | 163 +++++++++++------- .../org/opensearch/search/SearchService.java | 1 + .../search/builder/SearchSourceBuilder.java | 34 ++++ .../opensearch/search/fetch/FetchContext.java | 4 + .../opensearch/search/fetch/FetchPhase.java | 2 +- .../fetch/subphase/MatchedQueriesPhase.java | 76 ++++++-- .../internal/FilteredSearchContext.java | 8 + .../search/internal/SearchContext.java | 23 +++ .../search/internal/SubSearchContext.java | 13 ++ .../org/opensearch/search/SearchHitTests.java | 161 ++++++++++++++++- .../opensearch/test/TestSearchContext.java | 12 ++ .../test/hamcrest/OpenSearchAssertions.java | 4 + .../test/hamcrest/OpenSearchMatchers.java | 30 ++++ 22 files changed, 672 insertions(+), 134 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/350_matched_queries.yml diff --git a/CHANGELOG.md b/CHANGELOG.md index a6a78e0c19e98..da03b2e4430a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -140,6 +140,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074)) - Enable Fuzzy codec for doc id fields using a bloom filter ([#11022](https://github.com/opensearch-project/OpenSearch/pull/11022)) - [Metrics Framework] Adds support for Histogram metric ([#12062](https://github.com/opensearch-project/OpenSearch/pull/12062)) +- Support for returning scores in matched queries ([#11626](https://github.com/opensearch-project/OpenSearch/pull/11626)) ### Dependencies - 
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a6a78e0c19e98..da03b2e4430a6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -140,6 +140,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Remove concurrent segment search feature flag for GA launch ([#12074](https://github.com/opensearch-project/OpenSearch/pull/12074))
 - Enable Fuzzy codec for doc id fields using a bloom filter ([#11022](https://github.com/opensearch-project/OpenSearch/pull/11022))
 - [Metrics Framework] Adds support for Histogram metric ([#12062](https://github.com/opensearch-project/OpenSearch/pull/12062))
+- Support for returning scores in matched queries ([#11626](https://github.com/opensearch-project/OpenSearch/pull/11626))
 ### Dependencies
 - Bumps jetty version to 9.4.52.v20230823 to fix GMS-2023-1857 ([#9822](https://github.com/opensearch-project/OpenSearch/pull/9822))
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json
index e0fbeeb83ffc4..e78d49a67a98a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json
@@ -229,6 +229,11 @@
       "search_pipeline": {
         "type": "string",
         "description": "The search pipeline to use to execute this request"
+      },
+      "include_named_queries_score":{
+        "type": "boolean",
+        "description":"Indicates whether hit.matched_queries should be rendered as a map that includes the name of the matched query associated with its score (true) or as an array containing the name of the matched queries (false)",
+        "default":false
       }
     },
     "body":{
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/350_matched_queries.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/350_matched_queries.yml
new file mode 100644
index 0000000000000..25de51a316bd4
--- /dev/null
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/350_matched_queries.yml
@@ -0,0 +1,103 @@
+setup:
+  - skip:
+      version: " - 2.12.0"
+      reason: "implemented for versions post 2.12.0"
+
+---
+"matched queries":
+  - do:
+      indices.create:
+          index: test_1
+
+  - do:
+      bulk:
+        refresh: true
+        body:
+          - '{ "index" : { "_index" : "test_1", "_id" : "1" } }'
+          - '{"field" : 1 }'
+          - '{ "index" : { "_index" : "test_1", "_id" : "2" } }'
+          - '{"field" : [1, 2] }'
+
+  - do:
+      search:
+        index: test_1
+        body:
+          query:
+            bool: {
+              should: [
+                {
+                  match: {
+                    field: {
+                      query: 1,
+                      _name: match_field_1
+                    }
+                  }
+                },
+                {
+                  match: {
+                    field: {
+                      query: 2,
+                      _name: match_field_2,
+                      boost: 10
+                    }
+                  }
+                }
+              ]
+            }
+
+  - match: {hits.total.value: 2}
+  - length: {hits.hits.0.matched_queries: 2}
+  - match: {hits.hits.0.matched_queries: [ "match_field_1", "match_field_2" ]}
+  - length: {hits.hits.1.matched_queries: 1}
+  - match: {hits.hits.1.matched_queries: [ "match_field_1" ]}
+
+---
+
+"matched queries with scores":
+  - do:
+      indices.create:
+          index: test_1
+
+  - do:
+      bulk:
+        refresh: true
+        body:
+          - '{ "index" : { "_index" : "test_1", "_id" : "1" } }'
+          - '{"field" : 1 }'
+          - '{ "index" : { "_index" : "test_1", "_id" : "2" } }'
+          - '{"field" : [1, 2] }'
+
+  - do:
+      search:
+        include_named_queries_score: true
+        index: test_1
+        body:
+          query:
+            bool: {
+              should: [
+                {
+                  match: {
+                    field: {
+                      query: 1,
+                      _name: match_field_1
+                    }
+                  }
+                },
+                {
+                  match: {
+                    field: {
+                      query: 2,
+                      _name: match_field_2,
+                      boost: 10
+                    }
+                  }
+                }
+              ]
+            }
+
+  - match: { hits.total.value: 2 }
+  - length: { hits.hits.0.matched_queries: 2 }
+  - match: { hits.hits.0.matched_queries.match_field_1: 1 }
+  - match: { hits.hits.0.matched_queries.match_field_2: 10 }
+  - length: { hits.hits.1.matched_queries: 1 }
+  - match: { hits.hits.1.matched_queries.match_field_1: 1 }
diff --git a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java
index c049c8ed2d4a6..9decd17d95eab 100644
--- a/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java
+++ b/server/src/internalClusterTest/java/org/opensearch/indices/recovery/IndexPrimaryRelocationIT.java
@@ -66,19 +66,16 @@ public void testPrimaryRelocationWhileIndexing() throws Exception {
ensureGreen("test"); AtomicInteger numAutoGenDocs = new AtomicInteger(); final AtomicBoolean finished = new AtomicBoolean(false); - Thread indexingThread = new Thread() { - @Override - public void run() { - while (finished.get() == false && numAutoGenDocs.get() < 10_000) { - IndexResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); - assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); - DeleteResponse deleteResponse = client().prepareDelete("test", "id").get(); - assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); - client().prepareIndex("test").setSource("auto", true).get(); - numAutoGenDocs.incrementAndGet(); - } + Thread indexingThread = new Thread(() -> { + while (finished.get() == false && numAutoGenDocs.get() < 10_000) { + IndexResponse indexResponse = client().prepareIndex("test").setId("id").setSource("field", "value").get(); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); + DeleteResponse deleteResponse = client().prepareDelete("test", "id").get(); + assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); + client().prepareIndex("test").setSource("auto", true).get(); + numAutoGenDocs.incrementAndGet(); } - }; + }); indexingThread.start(); ClusterState initialState = client().admin().cluster().prepareState().get().getState(); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java index 7a828c06c5cd7..a1adc6f99b92a 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fetch/subphase/MatchedQueriesIT.java @@ -61,7 +61,9 @@ import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertHitCount; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasItemInArray; +import static org.hamcrest.Matchers.hasKey; public class MatchedQueriesIT extends ParameterizedStaticSettingsOpenSearchIntegTestCase { @@ -95,15 +97,18 @@ public void testSimpleMatchedQueryFromFilteredQuery() throws Exception { .should(rangeQuery("number").gte(2).queryName("test2")) ) ) + .setIncludeNamedQueriesScore(true) .get(); assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("3") || hit.getId().equals("2")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("test2")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2")); + assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f)); } else if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("test1")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1")); + assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -113,15 +118,18 @@ public void testSimpleMatchedQueryFromFilteredQuery() throws Exception { .setQuery( 
boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2")) ) + .setIncludeNamedQueriesScore(true) .get(); assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1") || hit.getId().equals("2")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("test1")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("test1")); + assertThat(hit.getMatchedQueryScore("test1"), equalTo(1f)); } else if (hit.getId().equals("3")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("test2")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("test2")); + assertThat(hit.getMatchedQueryScore("test2"), equalTo(1f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -147,12 +155,15 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception { assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(2)); - assertThat(hit.getMatchedQueries(), hasItemInArray("name")); - assertThat(hit.getMatchedQueries(), hasItemInArray("title")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); } else if (hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("name")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -168,12 +179,15 @@ public void testSimpleMatchedQueryFromTopLevelFilter() throws Exception { assertHitCount(searchResponse, 3L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(2)); - assertThat(hit.getMatchedQueries(), hasItemInArray("name")); - assertThat(hit.getMatchedQueries(), hasItemInArray("title")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); } else if (hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("name")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -197,9 +211,11 @@ public void testSimpleMatchedQueryFromTopLevelFilterAndFilteredQuery() throws Ex assertHitCount(searchResponse, 3L); for 
(SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1") || hit.getId().equals("2") || hit.getId().equals("3")) { - assertThat(hit.getMatchedQueries().length, equalTo(2)); - assertThat(hit.getMatchedQueries(), hasItemInArray("name")); - assertThat(hit.getMatchedQueries(), hasItemInArray("title")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(2)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("name")); + assertThat(hit.getMatchedQueryScore("name"), greaterThan(0f)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("title")); + assertThat(hit.getMatchedQueryScore("title"), greaterThan(0f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -231,13 +247,15 @@ public void testRegExpQuerySupportsName() throws InterruptedException { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.regexpQuery("title", "title1").queryName("regex")) + .setIncludeNamedQueriesScore(true) .get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("regex")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("regex")); + assertThat(hit.getMatchedQueryScore("regex"), equalTo(1f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -252,15 +270,17 @@ public void testPrefixQuerySupportsName() throws InterruptedException { refresh(); indexRandomForConcurrentSearch("test1"); - SearchResponse searchResponse = client().prepareSearch() + var query = client().prepareSearch() .setQuery(QueryBuilders.prefixQuery("title", "title").queryName("prefix")) - .get(); + .setIncludeNamedQueriesScore(true); + var searchResponse = query.get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("prefix")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("prefix")); + assertThat(hit.getMatchedQueryScore("prefix"), equalTo(1f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -282,8 +302,9 @@ public void testFuzzyQuerySupportsName() throws InterruptedException { for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("fuzzy")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("fuzzy")); + assertThat(hit.getMatchedQueryScore("fuzzy"), greaterThan(0f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -300,13 +321,15 @@ public void testWildcardQuerySupportsName() throws InterruptedException { SearchResponse searchResponse = client().prepareSearch() .setQuery(QueryBuilders.wildcardQuery("title", "titl*").queryName("wildcard")) + .setIncludeNamedQueriesScore(true) .get(); assertHitCount(searchResponse, 1L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("wildcard")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + 
assertThat(hit.getMatchedQueriesAndScores(), hasKey("wildcard")); + assertThat(hit.getMatchedQueryScore("wildcard"), equalTo(1f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -328,8 +351,9 @@ public void testSpanFirstQuerySupportsName() throws InterruptedException { for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("span")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("span")); + assertThat(hit.getMatchedQueryScore("span"), greaterThan(0f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -363,11 +387,13 @@ public void testMatchedWithShould() throws Exception { assertHitCount(searchResponse, 2L); for (SearchHit hit : searchResponse.getHits()) { if (hit.getId().equals("1")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("dolor")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("dolor")); + assertThat(hit.getMatchedQueryScore("dolor"), greaterThan(0f)); } else if (hit.getId().equals("2")) { - assertThat(hit.getMatchedQueries().length, equalTo(1)); - assertThat(hit.getMatchedQueries(), hasItemInArray("elit")); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("elit")); + assertThat(hit.getMatchedQueryScore("elit"), greaterThan(0f)); } else { fail("Unexpected document returned with id " + hit.getId()); } @@ -391,7 +417,10 @@ public void testMatchedWithWrapperQuery() throws Exception { for (QueryBuilder query : queries) { SearchResponse searchResponse = client().prepareSearch().setQuery(query).get(); assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getAt(0).getMatchedQueries()[0], equalTo("abc")); + SearchHit hit = searchResponse.getHits().getAt(0); + assertThat(hit.getMatchedQueriesAndScores().size(), equalTo(1)); + assertThat(hit.getMatchedQueriesAndScores(), hasKey("abc")); + assertThat(hit.getMatchedQueryScore("abc"), greaterThan(0f)); } } } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java index 6c4ea0cdeb1f1..5121d5023fd95 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/functionscore/QueryRescorerIT.java @@ -83,6 +83,7 @@ import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSecondHit; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertThirdHit; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasId; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasMatchedQueries; import static org.opensearch.test.hamcrest.OpenSearchAssertions.hasScore; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -594,7 +595,7 @@ public void testExplain() throws Exception { SearchResponse searchResponse = client().prepareSearch() .setSearchType(SearchType.DFS_QUERY_THEN_FETCH) - .setQuery(QueryBuilders.matchQuery("field1", "the quick brown").operator(Operator.OR)) + .setQuery(QueryBuilders.matchQuery("field1", "the quick 
brown").operator(Operator.OR).queryName("hello-world")) .setRescorer(innerRescoreQuery, 5) .setExplain(true) .get(); @@ -602,7 +603,10 @@ public void testExplain() throws Exception { assertFirstHit(searchResponse, hasId("1")); assertSecondHit(searchResponse, hasId("2")); assertThirdHit(searchResponse, hasId("3")); - + final String[] matchedQueries = { "hello-world" }; + assertFirstHit(searchResponse, hasMatchedQueries(matchedQueries)); + assertSecondHit(searchResponse, hasMatchedQueries(matchedQueries)); + assertThirdHit(searchResponse, hasMatchedQueries(matchedQueries)); for (int j = 0; j < 3; j++) { assertThat(searchResponse.getHits().getAt(j).getExplanation().getDescription(), equalTo(descriptionModes[innerMode])); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java index e949c5e0bea29..9dac827e7d518 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/opensearch/action/search/SearchRequestBuilder.java @@ -406,6 +406,15 @@ public SearchRequestBuilder setTrackScores(boolean trackScores) { return this; } + /** + * Applies when fetching scores with named queries, and controls if scores will be tracked as well. + * Defaults to {@code false}. + */ + public SearchRequestBuilder setIncludeNamedQueriesScore(boolean includeNamedQueriesScore) { + sourceBuilder().includeNamedQueriesScores(includeNamedQueriesScore); + return this; + } + /** * Indicates if the total hit count for the query should be tracked. Requests will count total hit count accurately * up to 10,000 by default, see {@link #setTrackTotalHitsUpTo(int)} to change this value or set to true/false to always/never diff --git a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java index 080366e536da1..80dc34c4d5d68 100644 --- a/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/opensearch/rest/action/search/RestSearchAction.java @@ -86,10 +86,13 @@ public class RestSearchAction extends BaseRestHandler { */ public static final String TOTAL_HITS_AS_INT_PARAM = "rest_total_hits_as_int"; public static final String TYPED_KEYS_PARAM = "typed_keys"; + public static final String INCLUDE_NAMED_QUERIES_SCORE_PARAM = "include_named_queries_score"; private static final Set<String> RESPONSE_PARAMS; static { - final Set<String> responseParams = new HashSet<>(Arrays.asList(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM)); + final Set<String> responseParams = new HashSet<>( + Arrays.asList(TYPED_KEYS_PARAM, TOTAL_HITS_AS_INT_PARAM, INCLUDE_NAMED_QUERIES_SCORE_PARAM) + ); RESPONSE_PARAMS = Collections.unmodifiableSet(responseParams); } @@ -209,6 +212,7 @@ public static void parseSearchRequest( searchRequest.pipeline(request.param("search_pipeline")); checkRestTotalHits(request, searchRequest); + request.paramAsBoolean(INCLUDE_NAMED_QUERIES_SCORE_PARAM, false); if (searchRequest.pointInTimeBuilder() != null) { preparePointInTime(searchRequest, request, namedWriteableRegistry); @@ -286,6 +290,10 @@ private static void parseSearchSource(final SearchSourceBuilder searchSourceBuil searchSourceBuilder.trackScores(request.paramAsBoolean("track_scores", false)); } + if (request.hasParam("include_named_queries_score")) { + searchSourceBuilder.includeNamedQueriesScores(request.paramAsBoolean("include_named_queries_score", false)); + 
} + if (request.hasParam("track_total_hits")) { if (Booleans.isBoolean(request.param("track_total_hits"))) { searchSourceBuilder.trackTotalHits(request.paramAsBoolean("track_total_hits", true)); diff --git a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java index 63e32dcf8f0b7..061aa2f6e5896 100644 --- a/server/src/main/java/org/opensearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/opensearch/search/DefaultSearchContext.java @@ -148,6 +148,8 @@ final class DefaultSearchContext extends SearchContext { private SortAndFormats sort; private Float minimumScore; private boolean trackScores = false; // when sorting, track scores as well... + + private boolean includeNamedQueriesScore = false; private int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; private FieldDoc searchAfter; private CollapseContext collapse; @@ -635,6 +637,17 @@ public boolean trackScores() { return this.trackScores; } + @Override + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + @Override + public boolean includeNamedQueriesScore() { + return includeNamedQueriesScore; + } + @Override public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { this.trackTotalHitsUpTo = trackTotalHitsUpTo; diff --git a/server/src/main/java/org/opensearch/search/SearchHit.java b/server/src/main/java/org/opensearch/search/SearchHit.java index 10e65fca3afb5..a3db7e0893a3d 100644 --- a/server/src/main/java/org/opensearch/search/SearchHit.java +++ b/server/src/main/java/org/opensearch/search/SearchHit.java @@ -64,19 +64,21 @@ import org.opensearch.index.mapper.MapperService; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.seqno.SequenceNumbers; +import org.opensearch.rest.action.search.RestSearchAction; import org.opensearch.search.fetch.subphase.highlight.HighlightField; import org.opensearch.search.lookup.SourceLookup; import org.opensearch.transport.RemoteClusterAware; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; @@ -120,7 +122,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do private SearchSortValues sortValues = SearchSortValues.EMPTY; - private String[] matchedQueries = Strings.EMPTY_ARRAY; + private Map<String, Float> matchedQueries = new HashMap<>(); private Explanation explanation; @@ -203,10 +205,20 @@ public SearchHit(StreamInput in) throws IOException { sortValues = new SearchSortValues(in); size = in.readVInt(); - if (size > 0) { - matchedQueries = new String[size]; + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + if (size > 0) { + Map<String, Float> tempMap = in.readMap(StreamInput::readString, StreamInput::readFloat); + matchedQueries = tempMap.entrySet() + .stream() + .sorted(Map.Entry.comparingByKey()) + .collect( + Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (oldValue, newValue) -> oldValue, LinkedHashMap::new) + ); + } + } else { + matchedQueries = new LinkedHashMap<>(size); for (int i = 0; i < size; i++) { - matchedQueries[i] = 
in.readString(); + matchedQueries.put(in.readString(), Float.NaN); } } // we call the setter here because that also sets the local index parameter @@ -224,36 +236,6 @@ public SearchHit(StreamInput in) throws IOException { } } - private Map<String, DocumentField> readFields(StreamInput in) throws IOException { - Map<String, DocumentField> fields; - int size = in.readVInt(); - if (size == 0) { - fields = emptyMap(); - } else if (size == 1) { - DocumentField hitField = new DocumentField(in); - fields = singletonMap(hitField.getName(), hitField); - } else { - fields = new HashMap<>(size); - for (int i = 0; i < size; i++) { - DocumentField field = new DocumentField(in); - fields.put(field.getName(), field); - } - fields = unmodifiableMap(fields); - } - return fields; - } - - private void writeFields(StreamOutput out, Map<String, DocumentField> fields) throws IOException { - if (fields == null) { - out.writeVInt(0); - } else { - out.writeVInt(fields.size()); - for (DocumentField field : fields.values()) { - field.writeTo(out); - } - } - } - private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); @Override @@ -286,11 +268,13 @@ public void writeTo(StreamOutput out) throws IOException { } sortValues.writeTo(out); - if (matchedQueries.length == 0) { - out.writeVInt(0); + out.writeVInt(matchedQueries.size()); + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + if (!matchedQueries.isEmpty()) { + out.writeMap(matchedQueries, StreamOutput::writeString, StreamOutput::writeFloat); + } } else { - out.writeVInt(matchedQueries.length); - for (String matchedFilter : matchedQueries) { + for (String matchedFilter : matchedQueries.keySet()) { out.writeString(matchedFilter); } } @@ -458,11 +442,11 @@ public DocumentField field(String fieldName) { } /* - * Adds a new DocumentField to the map in case both parameters are not null. - * */ + * Adds a new DocumentField to the map in case both parameters are not null. + * */ public void setDocumentField(String fieldName, DocumentField field) { if (fieldName == null || field == null) return; - if (documentFields.size() == 0) this.documentFields = new HashMap<>(); + if (documentFields.isEmpty()) this.documentFields = new HashMap<>(); this.documentFields.put(fieldName, field); } @@ -475,7 +459,7 @@ public DocumentField removeDocumentField(String fieldName) { * were required to be loaded. */ public Map<String, DocumentField> getFields() { - if (metaFields.size() > 0 || documentFields.size() > 0) { + if (!metaFields.isEmpty() || !documentFields.isEmpty()) { final Map<String, DocumentField> fields = new HashMap<>(); fields.putAll(metaFields); fields.putAll(documentFields); @@ -560,14 +544,45 @@ public String getClusterAlias() { } public void matchedQueries(String[] matchedQueries) { - this.matchedQueries = matchedQueries; + if (matchedQueries != null) { + for (String query : matchedQueries) { + this.matchedQueries.put(query, Float.NaN); + } + } + } + + public void matchedQueriesWithScores(Map<String, Float> matchedQueries) { + if (matchedQueries != null) { + this.matchedQueries = matchedQueries; + } } /** * The set of query and filter names the query matched with. Mainly makes sense for compound filters and queries. */ public String[] getMatchedQueries() { - return this.matchedQueries; + return matchedQueries == null ? new String[0] : matchedQueries.keySet().toArray(new String[0]); + } + + /** + * Returns the score of the provided named query if it matches. 
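+ * If no query with the given name matched the hit, {@code null} is returned.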
+ * <p> + * If the 'include_named_queries_score' is not set, this method will return {@link Float#NaN} + * for each named query instead of a numerical score. + * </p> + * + * @param name The name of the query to retrieve the score for. + * @return The score of the named query, or {@link Float#NaN} if 'include_named_queries_score' is not set. + */ + public Float getMatchedQueryScore(String name) { + return getMatchedQueriesAndScores().get(name); + } + + /** + * @return The map of the named queries that matched and their associated score. + */ + public Map<String, Float> getMatchedQueriesAndScores() { + return matchedQueries == null ? Collections.emptyMap() : matchedQueries; } /** @@ -654,7 +669,7 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t for (DocumentField field : metaFields.values()) { // ignore empty metadata fields - if (field.getValues().size() == 0) { + if (field.getValues().isEmpty()) { continue; } // _ignored is the only multi-valued meta field @@ -670,10 +685,10 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t } if (documentFields.isEmpty() == false && // ignore fields all together if they are all empty - documentFields.values().stream().anyMatch(df -> df.getValues().size() > 0)) { + documentFields.values().stream().anyMatch(df -> !df.getValues().isEmpty())) { builder.startObject(Fields.FIELDS); for (DocumentField field : documentFields.values()) { - if (field.getValues().size() > 0) { + if (!field.getValues().isEmpty()) { field.toXContent(builder, params); } } @@ -687,12 +702,21 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t builder.endObject(); } sortValues.toXContent(builder, params); - if (matchedQueries.length > 0) { - builder.startArray(Fields.MATCHED_QUERIES); - for (String matchedFilter : matchedQueries) { - builder.value(matchedFilter); + if (!matchedQueries.isEmpty()) { + boolean includeMatchedQueriesScore = params.paramAsBoolean(RestSearchAction.INCLUDE_NAMED_QUERIES_SCORE_PARAM, false); + if (includeMatchedQueriesScore) { + builder.startObject(Fields.MATCHED_QUERIES); + for (Map.Entry<String, Float> entry : matchedQueries.entrySet()) { + builder.field(entry.getKey(), entry.getValue()); + } + builder.endObject(); + } else { + builder.startArray(Fields.MATCHED_QUERIES); + for (String matchedFilter : matchedQueries.keySet()) { + builder.value(matchedFilter); + } + builder.endArray(); } - builder.endArray(); } if (getExplanation() != null) { builder.field(Fields._EXPLANATION); @@ -797,7 +821,27 @@ public static void declareInnerHitsParseFields(ObjectParser<Map<String, Object>, (p, c) -> parseInnerHits(p), new ParseField(Fields.INNER_HITS) ); - parser.declareStringArray((map, list) -> map.put(Fields.MATCHED_QUERIES, list), new ParseField(Fields.MATCHED_QUERIES)); + parser.declareField((p, map, context) -> { + XContentParser.Token token = p.currentToken(); + Map<String, Float> matchedQueries = new LinkedHashMap<>(); + if (token == XContentParser.Token.START_OBJECT) { + String fieldName = null; + while ((token = p.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + fieldName = p.currentName(); + } else if (token.isValue()) { + matchedQueries.put(fieldName, p.floatValue()); + } + } + } else if (token == XContentParser.Token.START_ARRAY) { + while (p.nextToken() != XContentParser.Token.END_ARRAY) { + matchedQueries.put(p.text(), Float.NaN); + } + } else { + throw new IllegalStateException("expected object or array but 
got [" + token + "]"); + } + map.put(Fields.MATCHED_QUERIES, matchedQueries); + }, new ParseField(Fields.MATCHED_QUERIES), ObjectParser.ValueType.OBJECT_ARRAY); parser.declareField( (map, list) -> map.put(Fields.SORT, list), SearchSortValues::fromXContent, @@ -828,7 +872,7 @@ public static SearchHit createFromMap(Map<String, Object> values) { assert shardId.getIndexName().equals(index); searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias, OriginalIndices.NONE)); } else { - // these fields get set anyways when setting the shard target, + // these fields get set anyway when setting the shard target, // but we set them explicitly when we don't have enough info to rebuild the shard target searchHit.index = index; searchHit.clusterAlias = clusterAlias; @@ -842,10 +886,7 @@ public static SearchHit createFromMap(Map<String, Object> values) { searchHit.sourceRef(get(SourceFieldMapper.NAME, values, null)); searchHit.explanation(get(Fields._EXPLANATION, values, null)); searchHit.setInnerHits(get(Fields.INNER_HITS, values, null)); - List<String> matchedQueries = get(Fields.MATCHED_QUERIES, values, null); - if (matchedQueries != null) { - searchHit.matchedQueries(matchedQueries.toArray(new String[0])); - } + searchHit.matchedQueriesWithScores(get(Fields.MATCHED_QUERIES, values, null)); return searchHit; } @@ -965,7 +1006,7 @@ public boolean equals(Object obj) { && Objects.equals(documentFields, other.documentFields) && Objects.equals(metaFields, other.metaFields) && Objects.equals(getHighlightFields(), other.getHighlightFields()) - && Arrays.equals(matchedQueries, other.matchedQueries) + && Objects.equals(getMatchedQueriesAndScores(), other.getMatchedQueriesAndScores()) && Objects.equals(explanation, other.explanation) && Objects.equals(shard, other.shard) && Objects.equals(innerHits, other.innerHits) @@ -985,7 +1026,7 @@ public int hashCode() { documentFields, metaFields, getHighlightFields(), - Arrays.hashCode(matchedQueries), + getMatchedQueriesAndScores(), explanation, shard, innerHits, diff --git a/server/src/main/java/org/opensearch/search/SearchService.java b/server/src/main/java/org/opensearch/search/SearchService.java index 7cd91557fcf6d..62eb597e387e6 100644 --- a/server/src/main/java/org/opensearch/search/SearchService.java +++ b/server/src/main/java/org/opensearch/search/SearchService.java @@ -1274,6 +1274,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc } } context.trackScores(source.trackScores()); + context.includeNamedQueriesScore(source.includeNamedQueriesScore()); if (source.trackTotalHitsUpTo() != null && source.trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_ACCURATE && context.scrollContext() != null) { diff --git a/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java index 434e630893f25..bdd92a5baa115 100644 --- a/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/opensearch/search/builder/SearchSourceBuilder.java @@ -117,6 +117,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, R public static final ParseField IGNORE_FAILURE_FIELD = new ParseField("ignore_failure"); public static final ParseField SORT_FIELD = new ParseField("sort"); public static final ParseField TRACK_SCORES_FIELD = new ParseField("track_scores"); + public static final ParseField INCLUDE_NAMED_QUERIES_SCORE = new ParseField("include_named_queries_score"); public 
static final ParseField TRACK_TOTAL_HITS_FIELD = new ParseField("track_total_hits"); public static final ParseField INDICES_BOOST_FIELD = new ParseField("indices_boost"); public static final ParseField AGGREGATIONS_FIELD = new ParseField("aggregations"); @@ -175,6 +176,8 @@ public static HighlightBuilder highlight() { private boolean trackScores = false; + private Boolean includeNamedQueriesScore; + private Integer trackTotalHitsUpTo; private SearchAfterBuilder searchAfterBuilder; @@ -276,6 +279,9 @@ public SearchSourceBuilder(StreamInput in) throws IOException { searchPipelineSource = in.readMap(); } } + if (in.getVersion().onOrAfter(Version.V_3_0_0)) { + includeNamedQueriesScore = in.readOptionalBoolean(); + } } @Override @@ -341,6 +347,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(searchPipelineSource); } } + if (out.getVersion().onOrAfter(Version.V_3_0_0)) { + out.writeOptionalBoolean(includeNamedQueriesScore); + } } /** @@ -568,6 +577,22 @@ public SearchSourceBuilder trackScores(boolean trackScores) { return this; } + /** + * Applies when there are named queries, and controls whether their scores are returned as well. + * Defaults to {@code false}. + */ + public SearchSourceBuilder includeNamedQueriesScores(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + /** + * Indicates whether scores will be returned for every named query that matched. + */ + public boolean includeNamedQueriesScore() { + return includeNamedQueriesScore != null && includeNamedQueriesScore; + } + /** * Indicates whether scores will be tracked for this request. */ @@ -1103,6 +1128,7 @@ private SearchSourceBuilder shallowCopy( rewrittenBuilder.terminateAfter = terminateAfter; rewrittenBuilder.timeout = timeout; rewrittenBuilder.trackScores = trackScores; + rewrittenBuilder.includeNamedQueriesScore = includeNamedQueriesScore; rewrittenBuilder.trackTotalHitsUpTo = trackTotalHitsUpTo; rewrittenBuilder.version = version; rewrittenBuilder.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; @@ -1155,6 +1181,8 @@ public void parseXContent(XContentParser parser, boolean checkTrailingTokens) th explain = parser.booleanValue(); } else if (TRACK_SCORES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { trackScores = parser.booleanValue(); + } else if (INCLUDE_NAMED_QUERIES_SCORE.match(currentFieldName, parser.getDeprecationHandler())) { + includeNamedQueriesScore = parser.booleanValue(); } else if (TRACK_TOTAL_HITS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { if (token == XContentParser.Token.VALUE_BOOLEAN || (token == XContentParser.Token.VALUE_STRING && Booleans.isBoolean(parser.text()))) { @@ -1418,6 +1446,10 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t builder.field(TRACK_SCORES_FIELD.getPreferredName(), true); } + if (includeNamedQueriesScore != null) { + builder.field(INCLUDE_NAMED_QUERIES_SCORE.getPreferredName(), includeNamedQueriesScore); + } + if (trackTotalHitsUpTo != null) { builder.field(TRACK_TOTAL_HITS_FIELD.getPreferredName(), trackTotalHitsUpTo); } @@ -1749,6 +1781,7 @@ public int hashCode() { terminateAfter, timeout, trackScores, + includeNamedQueriesScore, version, seqNoAndPrimaryTerm, profile, @@ -1791,6 +1824,7 @@ public boolean equals(Object obj) { && Objects.equals(terminateAfter, other.terminateAfter) && Objects.equals(timeout, other.timeout) && Objects.equals(trackScores, other.trackScores) + && Objects.equals(includeNamedQueriesScore, 
other.includeNamedQueriesScore) && Objects.equals(version, other.version) && Objects.equals(seqNoAndPrimaryTerm, other.seqNoAndPrimaryTerm) && Objects.equals(profile, other.profile) diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchContext.java b/server/src/main/java/org/opensearch/search/fetch/FetchContext.java index 7e36ace9e2112..5be3733106655 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchContext.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchContext.java @@ -188,6 +188,10 @@ public boolean fetchScores() { return searchContext.sort() != null && searchContext.trackScores(); } + public boolean includeNamedQueriesScore() { + return searchContext.includeNamedQueriesScore(); + } + /** * Configuration for returning inner hits */ diff --git a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java index a842c0f1adc6e..1698f41caaf2b 100644 --- a/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/FetchPhase.java @@ -91,7 +91,7 @@ /** * Fetch phase of a search request, used to fetch the actual top matching documents to be returned to the client, identified - * after reducing all of the matches returned by the query phase + * after reducing all the matches returned by the query phase * * @opensearch.api */ diff --git a/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java b/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java index 6c589438d6b4c..406d9c8b4bc03 100644 --- a/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java +++ b/server/src/main/java/org/opensearch/search/fetch/subphase/MatchedQueriesPhase.java @@ -28,12 +28,12 @@ * Modifications Copyright OpenSearch Contributors. See * GitHub history for details. */ - package org.opensearch.search.fetch.subphase; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -45,6 +45,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; @@ -67,25 +68,69 @@ public FetchSubPhaseProcessor getProcessor(FetchContext context) throws IOExcept if (namedQueries.isEmpty()) { return null; } + + Map<String, Weight> weights = prepareWeights(context, namedQueries); + + return context.includeNamedQueriesScore() ? createScoringProcessor(weights) : createNonScoringProcessor(weights); + } + + private Map<String, Weight> prepareWeights(FetchContext context, Map<String, Query> namedQueries) throws IOException { Map<String, Weight> weights = new HashMap<>(); + ScoreMode scoreMode = context.includeNamedQueriesScore() ? 
ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES; for (Map.Entry<String, Query> entry : namedQueries.entrySet()) { - weights.put( - entry.getKey(), - context.searcher().createWeight(context.searcher().rewrite(entry.getValue()), ScoreMode.COMPLETE_NO_SCORES, 1) - ); + weights.put(entry.getKey(), context.searcher().createWeight(context.searcher().rewrite(entry.getValue()), scoreMode, 1)); } + return weights; + } + + private FetchSubPhaseProcessor createScoringProcessor(Map<String, Weight> weights) { return new FetchSubPhaseProcessor() { + final Map<String, Scorer> matchingScorers = new HashMap<>(); + + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + matchingScorers.clear(); + for (Map.Entry<String, Weight> entry : weights.entrySet()) { + ScorerSupplier scorerSupplier = entry.getValue().scorerSupplier(readerContext); + if (scorerSupplier != null) { + Scorer scorer = scorerSupplier.get(0L); + if (scorer != null) { + matchingScorers.put(entry.getKey(), scorer); + } + } + } + } + + @Override + public void process(HitContext hitContext) throws IOException { + Map<String, Float> matches = new LinkedHashMap<>(); + int docId = hitContext.docId(); + for (Map.Entry<String, Scorer> entry : matchingScorers.entrySet()) { + Scorer scorer = entry.getValue(); + if (scorer.iterator().docID() < docId) { + scorer.iterator().advance(docId); + } + if (scorer.iterator().docID() == docId) { + matches.put(entry.getKey(), scorer.score()); + } + } + hitContext.hit().matchedQueriesWithScores(matches); + } + }; + } - final Map<String, Bits> matchingIterators = new HashMap<>(); + private FetchSubPhaseProcessor createNonScoringProcessor(Map<String, Weight> weights) { + return new FetchSubPhaseProcessor() { + final Map<String, Bits> matchingBits = new HashMap<>(); @Override public void setNextReader(LeafReaderContext readerContext) throws IOException { - matchingIterators.clear(); + matchingBits.clear(); for (Map.Entry<String, Weight> entry : weights.entrySet()) { - ScorerSupplier ss = entry.getValue().scorerSupplier(readerContext); - if (ss != null) { - Bits matchingBits = Lucene.asSequentialAccessBits(readerContext.reader().maxDoc(), ss); - matchingIterators.put(entry.getKey(), matchingBits); + ScorerSupplier scorerSupplier = entry.getValue().scorerSupplier(readerContext); + if (scorerSupplier != null) { + Bits bits = Lucene.asSequentialAccessBits(readerContext.reader().maxDoc(), scorerSupplier); + matchingBits.put(entry.getKey(), bits); } } } @@ -93,15 +138,14 @@ public void setNextReader(LeafReaderContext readerContext) throws IOException { @Override public void process(HitContext hitContext) { List<String> matches = new ArrayList<>(); - int doc = hitContext.docId(); - for (Map.Entry<String, Bits> iterator : matchingIterators.entrySet()) { - if (iterator.getValue().get(doc)) { - matches.add(iterator.getKey()); + int docId = hitContext.docId(); + for (Map.Entry<String, Bits> entry : matchingBits.entrySet()) { + if (entry.getValue().get(docId)) { + matches.add(entry.getKey()); } } hitContext.hit().matchedQueries(matches.toArray(new String[0])); } }; } - } diff --git a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java index 151ef97a2a141..3a3b46366a6d2 100644 --- a/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/FilteredSearchContext.java @@ -340,6 +340,14 @@ public FieldDoc 
searchAfter() { return in.searchAfter(); } + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + return in.includeNamedQueriesScore(includeNamedQueriesScore); + } + + public boolean includeNamedQueriesScore() { + return in.includeNamedQueriesScore(); + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { return in.parsedPostFilter(postFilter); } diff --git a/server/src/main/java/org/opensearch/search/internal/SearchContext.java b/server/src/main/java/org/opensearch/search/internal/SearchContext.java index 02837da64dafd..cd8f9f8410d50 100644 --- a/server/src/main/java/org/opensearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/SearchContext.java @@ -305,6 +305,29 @@ public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) { public abstract boolean trackScores(); + /** + * Sets whether named queries' scores should be included in the search results. + * Defaults to {@code false}, meaning that scores from named queries are not included. + * + * @param includeNamedQueriesScore true to include scores from named queries, false otherwise. + */ + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + // Default implementation does nothing and returns this for chaining. + // Implementations of SearchContext should override this method to actually store the value. + return this; + } + + /** + * Checks if scores from named queries are included in the search results. + * + * @return true if scores from named queries are included, false otherwise. + */ + public boolean includeNamedQueriesScore() { + // Default implementation returns false. + // Implementations of SearchContext should override this method to return the actual value. 
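+ // In this change, DefaultSearchContext, SubSearchContext and TestSearchContext override both methods to store and return the flag.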
+ return false; + } + public abstract SearchContext trackTotalHitsUpTo(int trackTotalHits); /** diff --git a/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java b/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java index 55315013ea8c9..b2c97baf78d91 100644 --- a/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/opensearch/search/internal/SubSearchContext.java @@ -82,6 +82,8 @@ public class SubSearchContext extends FilteredSearchContext { private boolean explain; private boolean trackScores; + + private boolean includeNamedQueriesScore; private boolean version; private boolean seqNoAndPrimaryTerm; @@ -234,6 +236,17 @@ public boolean trackScores() { return trackScores; } + @Override + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + @Override + public boolean includeNamedQueriesScore() { + return includeNamedQueriesScore; + } + @Override public SearchContext parsedPostFilter(ParsedQuery postFilter) { throw new UnsupportedOperationException("Not supported"); diff --git a/server/src/test/java/org/opensearch/search/SearchHitTests.java b/server/src/test/java/org/opensearch/search/SearchHitTests.java index 88d5fb38a6cb1..13b4d9f976ed5 100644 --- a/server/src/test/java/org/opensearch/search/SearchHitTests.java +++ b/server/src/test/java/org/opensearch/search/SearchHitTests.java @@ -56,11 +56,13 @@ import org.opensearch.test.AbstractWireSerializingTestCase; import org.opensearch.test.RandomObjects; import org.opensearch.test.VersionUtils; +import org.junit.Assert; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Predicate; @@ -76,6 +78,25 @@ import static org.hamcrest.Matchers.nullValue; public class SearchHitTests extends AbstractWireSerializingTestCase<SearchHit> { + + private Map<String, Float> getSampleMatchedQueries() { + Map<String, Float> matchedQueries = new LinkedHashMap<>(); + matchedQueries.put("query1", 1.0f); + matchedQueries.put("query2", 0.5f); + return matchedQueries; + } + + public static SearchHit createTestItemWithMatchedQueriesScores(boolean withOptionalInnerHits, boolean withShardTarget) { + var searchHit = createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget); + int size = randomIntBetween(1, 5); // Ensure at least one matched query + Map<String, Float> matchedQueries = new LinkedHashMap<>(size); + for (int i = 0; i < size; i++) { + matchedQueries.put(randomAlphaOfLength(5), randomFloat()); + } + searchHit.matchedQueriesWithScores(matchedQueries); + return searchHit; + } + public static SearchHit createTestItem(boolean withOptionalInnerHits, boolean withShardTarget) { return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget); } @@ -129,11 +150,11 @@ public static SearchHit createTestItem(final MediaType mediaType, boolean withOp } if (randomBoolean()) { int size = randomIntBetween(0, 5); - String[] matchedQueries = new String[size]; + Map<String, Float> matchedQueries = new LinkedHashMap<>(size); for (int i = 0; i < size; i++) { - matchedQueries[i] = randomAlphaOfLength(5); + matchedQueries.put(randomAlphaOfLength(5), Float.NaN); } - hit.matchedQueries(matchedQueries); + hit.matchedQueriesWithScores(matchedQueries); } if 
(randomBoolean()) { hit.explanation(createExplanation(randomIntBetween(0, 5))); @@ -219,6 +240,21 @@ public void testFromXContentLenientParsing() throws IOException { assertToXContentEquivalent(originalBytes, toXContent(parsed, xContentType, true), xContentType); } + public void testSerializationDeserializationWithMatchedQueriesScores() throws IOException { + SearchHit searchHit = createTestItemWithMatchedQueriesScores(true, true); + SearchHit deserializedSearchHit = copyWriteable(searchHit, getNamedWriteableRegistry(), SearchHit::new, Version.V_3_0_0); + assertEquals(searchHit, deserializedSearchHit); + assertEquals(searchHit.getMatchedQueriesAndScores(), deserializedSearchHit.getMatchedQueriesAndScores()); + } + + public void testSerializationDeserializationWithMatchedQueriesList() throws IOException { + SearchHit searchHit = createTestItem(true, true); + SearchHit deserializedSearchHit = copyWriteable(searchHit, getNamedWriteableRegistry(), SearchHit::new, Version.V_2_12_0); + assertEquals(searchHit, deserializedSearchHit); + assertEquals(searchHit.getMatchedQueriesAndScores(), deserializedSearchHit.getMatchedQueriesAndScores()); + Assert.assertArrayEquals(searchHit.getMatchedQueries(), deserializedSearchHit.getMatchedQueries()); + } + /** * When e.g. with "stored_fields": "_none_", only "_index" and "_score" are returned. */ @@ -244,6 +280,125 @@ public void testToXContent() throws IOException { assertEquals("{\"_id\":\"id1\",\"_score\":1.5}", builder.toString()); } + public void testSerializeShardTargetWithNewVersion() throws Exception { + String clusterAlias = randomBoolean() ? null : "cluster_alias"; + SearchShardTarget target = new SearchShardTarget( + "_node_id", + new ShardId(new Index("_index", "_na_"), 0), + clusterAlias, + OriginalIndices.NONE + ); + + Map<String, SearchHits> innerHits = new HashMap<>(); + SearchHit innerHit1 = new SearchHit(0, "_id", null, null); + innerHit1.shard(target); + SearchHit innerInnerHit2 = new SearchHit(0, "_id", null, null); + innerInnerHit2.shard(target); + innerHits.put("1", new SearchHits(new SearchHit[] { innerInnerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHit1.setInnerHits(innerHits); + SearchHit innerHit2 = new SearchHit(0, "_id", null, null); + innerHit2.shard(target); + SearchHit innerHit3 = new SearchHit(0, "_id", null, null); + innerHit3.shard(target); + + innerHits = new HashMap<>(); + SearchHit hit1 = new SearchHit(0, "_id", null, null); + innerHits.put("1", new SearchHits(new SearchHit[] { innerHit1, innerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHits.put("2", new SearchHits(new SearchHit[] { innerHit3 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + hit1.shard(target); + hit1.setInnerHits(innerHits); + + SearchHit hit2 = new SearchHit(0, "_id", null, null); + hit2.shard(target); + + SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); + + SearchHits results = copyWriteable(hits, getNamedWriteableRegistry(), SearchHits::new, Version.V_3_0_0); + SearchShardTarget deserializedTarget = results.getAt(0).getShard(); + assertThat(deserializedTarget, equalTo(target)); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue()); + 
assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue()); + for (SearchHit hit : results) { + assertEquals(clusterAlias, hit.getClusterAlias()); + if (hit.getInnerHits() != null) { + for (SearchHits innerhits : hit.getInnerHits().values()) { + for (SearchHit innerHit : innerhits) { + assertEquals(clusterAlias, innerHit.getClusterAlias()); + } + } + } + } + assertThat(results.getAt(1).getShard(), equalTo(target)); + } + + public void testSerializeShardTargetWithNewVersionAndMatchedQueries() throws Exception { + String clusterAlias = randomBoolean() ? null : "cluster_alias"; + SearchShardTarget target = new SearchShardTarget( + "_node_id", + new ShardId(new Index("_index", "_na_"), 0), + clusterAlias, + OriginalIndices.NONE + ); + + Map<String, SearchHits> innerHits = new HashMap<>(); + SearchHit innerHit1 = new SearchHit(0, "_id", null, null); + innerHit1.shard(target); + innerHit1.matchedQueriesWithScores(getSampleMatchedQueries()); + SearchHit innerInnerHit2 = new SearchHit(0, "_id", null, null); + innerInnerHit2.shard(target); + innerHits.put("1", new SearchHits(new SearchHit[] { innerInnerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHit1.setInnerHits(innerHits); + SearchHit innerHit2 = new SearchHit(0, "_id", null, null); + innerHit2.shard(target); + innerHit2.matchedQueriesWithScores(getSampleMatchedQueries()); + SearchHit innerHit3 = new SearchHit(0, "_id", null, null); + innerHit3.shard(target); + innerHit3.matchedQueriesWithScores(getSampleMatchedQueries()); + + innerHits = new HashMap<>(); + SearchHit hit1 = new SearchHit(0, "_id", null, null); + innerHits.put("1", new SearchHits(new SearchHit[] { innerHit1, innerHit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + innerHits.put("2", new SearchHits(new SearchHit[] { innerHit3 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); + hit1.shard(target); + hit1.setInnerHits(innerHits); + + SearchHit hit2 = new SearchHit(0, "_id", null, null); + hit2.shard(target); + + SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); + + SearchHits results = copyWriteable(hits, getNamedWriteableRegistry(), SearchHits::new, Version.V_3_0_0); + SearchShardTarget deserializedTarget = results.getAt(0).getShard(); + assertThat(deserializedTarget, equalTo(target)); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(0).getInnerHits().get("1").getAt(0).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("1").getAt(1).getShard(), notNullValue()); + assertThat(results.getAt(0).getInnerHits().get("2").getAt(0).getShard(), notNullValue()); + String[] expectedMatchedQueries = new String[] { "query1", "query2" }; + String[] actualMatchedQueries = results.getAt(0).getInnerHits().get("1").getAt(0).getMatchedQueries(); + assertArrayEquals(expectedMatchedQueries, actualMatchedQueries); + + Map<String, Float> expectedMatchedQueriesAndScores = new LinkedHashMap<>(); + expectedMatchedQueriesAndScores.put("query1", 1.0f); + expectedMatchedQueriesAndScores.put("query2", 0.5f); + + Map<String, Float> actualMatchedQueriesAndScores = results.getAt(0).getInnerHits().get("1").getAt(0).getMatchedQueriesAndScores(); + assertEquals(expectedMatchedQueriesAndScores, actualMatchedQueriesAndScores); + for (SearchHit hit : results) { + assertEquals(clusterAlias, hit.getClusterAlias()); + if (hit.getInnerHits() != null) { + for 
(SearchHits innerhits : hit.getInnerHits().values()) { + for (SearchHit innerHit : innerhits) { + assertEquals(clusterAlias, innerHit.getClusterAlias()); + } + } + } + } + assertThat(results.getAt(1).getShard(), equalTo(target)); + } + public void testSerializeShardTarget() throws Exception { String clusterAlias = randomBoolean() ? null : "cluster_alias"; SearchShardTarget target = new SearchShardTarget( diff --git a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java index 2fb345f73fb06..09a72dcdc3641 100644 --- a/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/opensearch/test/TestSearchContext.java @@ -107,6 +107,7 @@ public class TestSearchContext extends SearchContext { SearchShardTask task; SortAndFormats sort; boolean trackScores = false; + boolean includeNamedQueriesScore = false; int trackTotalHitsUpTo = SearchContext.DEFAULT_TRACK_TOTAL_HITS_UP_TO; ContextIndexSearcher searcher; @@ -409,6 +410,17 @@ public boolean trackScores() { return trackScores; } + @Override + public SearchContext includeNamedQueriesScore(boolean includeNamedQueriesScore) { + this.includeNamedQueriesScore = includeNamedQueriesScore; + return this; + } + + @Override + public boolean includeNamedQueriesScore() { + return includeNamedQueriesScore; + } + @Override public SearchContext trackTotalHitsUpTo(int trackTotalHitsUpTo) { this.trackTotalHitsUpTo = trackTotalHitsUpTo; diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java index 183214c159c14..9b0de13c35ec8 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchAssertions.java @@ -528,6 +528,10 @@ public static Matcher<SearchHit> hasScore(final float score) { return new OpenSearchMatchers.SearchHitHasScoreMatcher(score); } + public static Matcher<SearchHit> hasMatchedQueries(final String[] matchedQueries) { + return new OpenSearchMatchers.SearchHitMatchedQueriesMatcher(matchedQueries); + } + public static <T, V> CombinableMatcher<T> hasProperty(Function<? super T, ? 
extends V> property, Matcher<V> valueMatcher) { return OpenSearchMatchers.HasPropertyLambdaMatcher.hasProperty(property, valueMatcher); } diff --git a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java index 5889b7e269ed2..2be94bd53e3c1 100644 --- a/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java +++ b/test/framework/src/main/java/org/opensearch/test/hamcrest/OpenSearchMatchers.java @@ -38,6 +38,7 @@ import org.hamcrest.TypeSafeMatcher; import org.hamcrest.core.CombinableMatcher; +import java.util.Arrays; import java.util.function.Function; public class OpenSearchMatchers { @@ -111,6 +112,35 @@ public void describeTo(final Description description) { } } + public static class SearchHitMatchedQueriesMatcher extends TypeSafeMatcher<SearchHit> { + private String[] matchedQueries; + + public SearchHitMatchedQueriesMatcher(String[] matchedQueries) { + this.matchedQueries = matchedQueries; + } + + @Override + protected boolean matchesSafely(SearchHit searchHit) { + String[] searchHitQueries = searchHit.getMatchedQueries(); + if (matchedQueries == null) { + return false; + } + Arrays.sort(searchHitQueries); + Arrays.sort(matchedQueries); + return Arrays.equals(searchHitQueries, matchedQueries); + } + + @Override + public void describeMismatchSafely(final SearchHit searchHit, final Description mismatchDescription) { + mismatchDescription.appendText(" matched queries were ").appendValue(Arrays.toString(searchHit.getMatchedQueries())); + } + + @Override + public void describeTo(final Description description) { + description.appendText("searchHit matched queries should be ").appendValue(Arrays.toString(matchedQueries)); + } + } + public static class HasPropertyLambdaMatcher<T, V> extends FeatureMatcher<T, V> { private final Function<? super T, ? extends V> property;