From 455fad34eea2cb6d606f54a41ef170066b87b04e Mon Sep 17 00:00:00 2001
From: Tianli Feng
Date: Tue, 12 Apr 2022 14:58:19 -0700
Subject: [PATCH 01/41] Change deprecation message for API parameter value
 'master_node' of parameter 'metric' (#2880)

Signed-off-by: Tianli Feng
---
 .../resources/rest-api-spec/test/cluster.state/20_filtering.yml | 2 +-
 .../rest/action/admin/cluster/RestClusterRerouteAction.java     | 2 +-
 .../rest/action/admin/cluster/RestClusterStateAction.java       | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
index b2c1e1e561933..b17201a911290 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.state/20_filtering.yml
@@ -169,7 +169,7 @@ setup:
       cluster.state:
         metric: [ master_node, version ]
       allowed_warnings:
-        - 'Parameter [master_timeout] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_timeout] instead.'
+        - 'Assigning [master_node] to parameter [metric] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_node] instead.'

  - match: { cluster_uuid: $cluster_uuid }
  - is_true: master_node

diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java
index 9c0e09b7629e0..dc5e0ba57e4db 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterRerouteAction.java
@@ -83,7 +83,7 @@ public RestClusterRerouteAction(SettingsFilter settingsFilter) {
     // It's used to log deprecation when request parameter 'metric' contains 'master_node', or request parameter 'master_timeout' is used.
     private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterRerouteAction.class);
     private static final String DEPRECATED_MESSAGE_MASTER_NODE =
-        "Deprecated value [master_node] used for parameter [metric]. To promote inclusive language, please use [cluster_manager_node] instead. It will be unsupported in a future major version.";
+        "Assigning [master_node] to parameter [metric] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_node] instead.";

     @Override
     public List<Route> routes() {

diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java
index 7f18a19b5cd54..02b1eaa741c9e 100644
--- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java
+++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestClusterStateAction.java
@@ -76,7 +76,7 @@ public RestClusterStateAction(SettingsFilter settingsFilter) {
     // It's used to log deprecation when request parameter 'metric' contains 'master_node', or request parameter 'master_timeout' is used.
     private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestClusterStateAction.class);
     private static final String DEPRECATED_MESSAGE_MASTER_NODE =
-        "Deprecated value [master_node] used for parameter [metric]. To promote inclusive language, please use [cluster_manager_node] instead. It will be unsupported in a future major version.";
+        "Assigning [master_node] to parameter [metric] is deprecated and will be removed in 3.0. To support inclusive language, please use [cluster_manager_node] instead.";

     @Override
     public String getName() {

From 135177e28ed2b65f0b8c5c30bfb0b7857ac241b1 Mon Sep 17 00:00:00 2001
From: Tianli Feng
Date: Wed, 13 Apr 2022 11:09:30 -0700
Subject: [PATCH 02/41] Remove unused file
 x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 (#2869)

Signed-off-by: Tianli Feng
---
 x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 | 1 -
 1 file changed, 1 deletion(-)
 delete mode 100644 x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1

diff --git a/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1 b/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1
deleted file mode 100644
index f40f0242448e8..0000000000000
--- a/x-pack/plugin/core/licenses/commons-logging-1.2.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-4bfc12adfe4842bf07b657f0369c4cb522955686
\ No newline at end of file

From 3af4300c3f62b23eb1368cd3354677ed7d8b0737 Mon Sep 17 00:00:00 2001
From: "opensearch-trigger-bot[bot]"
 <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com>
Date: Wed, 13 Apr 2022 14:13:18 -0400
Subject: [PATCH 03/41] Decouple IndexSettings from IncludeExclude (#2860)
 (#2861)

This change refactors an earlier change to impose a reg-ex size limit on
the include/exclude string. Instead of accepting an IndexSettings
instance, the class now accepts an integer limit value. This is necessary
because the IncludeExclude class is used outside the core codebase, whose
use-cases may be unaware of indices and their settings. To ensure that a
limit is always imposed, a default limit is defined in the class.

(cherry picked from commit ba1966853e728b153e42be59ba449420e79b09ee)

Signed-off-by: Kartik Ganesh

Co-authored-by: Kartik Ganesh
---
 .../bucket/terms/IncludeExclude.java          | 41 +++++++++++++++----
 .../terms/RareTermsAggregatorFactory.java     |  5 +--
 .../SignificantTermsAggregatorFactory.java    |  9 ++--
 .../SignificantTextAggregatorFactory.java     |  5 +--
 .../bucket/terms/TermsAggregatorFactory.java  |  9 ++--
 .../support/IncludeExcludeTests.java          | 25 +++--------
 6 files changed, 50 insertions(+), 44 deletions(-)

diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java
index 71320909ca5d2..f32abd1892592 100644
--- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/IncludeExclude.java
@@ -79,6 +79,14 @@ public class IncludeExclude implements Writeable, ToXContentFragment {
     // can disagree on which terms hash to the required partition.
     private static final int HASH_PARTITIONING_SEED = 31;

+    /**
+     * The default length limit for a reg-ex string. The value is derived from {@link IndexSettings#MAX_REGEX_LENGTH_SETTING}.
+ * For context, see: + * https://github.com/opensearch-project/OpenSearch/issues/1992 + * https://github.com/opensearch-project/OpenSearch/issues/2858 + */ + private static final int DEFAULT_MAX_REGEX_LENGTH = 1000; + // for parsing purposes only // TODO: move all aggs to the same package so that this stuff could be pkg-private public static IncludeExclude merge(IncludeExclude include, IncludeExclude exclude) { @@ -576,10 +584,10 @@ public boolean isPartitionBased() { return incNumPartitions > 0; } - private Automaton toAutomaton(IndexSettings indexSettings) { + private Automaton toAutomaton(int maxRegExLength) { Automaton a; if (include != null) { - validateRegExpStringLength(include, indexSettings); + validateRegExpStringLength(include, maxRegExLength); a = new RegExp(include).toAutomaton(); } else if (includeValues != null) { a = Automata.makeStringUnion(includeValues); @@ -587,7 +595,7 @@ private Automaton toAutomaton(IndexSettings indexSettings) { a = Automata.makeAnyString(); } if (exclude != null) { - validateRegExpStringLength(exclude, indexSettings); + validateRegExpStringLength(exclude, maxRegExLength); Automaton excludeAutomaton = new RegExp(exclude).toAutomaton(); a = Operations.minus(a, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } else if (excludeValues != null) { @@ -596,8 +604,7 @@ private Automaton toAutomaton(IndexSettings indexSettings) { return a; } - private static void validateRegExpStringLength(String source, IndexSettings indexSettings) { - int maxRegexLength = indexSettings.getMaxRegexLength(); + private static void validateRegExpStringLength(String source, int maxRegexLength) { if (maxRegexLength > 0 && source.length() > maxRegexLength) { throw new IllegalArgumentException( "The length of regex [" @@ -613,9 +620,17 @@ private static void validateRegExpStringLength(String source, IndexSettings inde } } - public StringFilter convertToStringFilter(DocValueFormat format, IndexSettings indexSettings) { + /** + * Wrapper method that imposes a default regex limit. + * See https://github.com/opensearch-project/OpenSearch/issues/2858 + */ + public StringFilter convertToStringFilter(DocValueFormat format) { + return convertToStringFilter(format, DEFAULT_MAX_REGEX_LENGTH); + } + + public StringFilter convertToStringFilter(DocValueFormat format, int maxRegexLength) { if (isRegexBased()) { - return new AutomatonBackedStringFilter(toAutomaton(indexSettings)); + return new AutomatonBackedStringFilter(toAutomaton(maxRegexLength)); } if (isPartitionBased()) { return new PartitionedStringFilter(); @@ -636,10 +651,18 @@ private static SortedSet parseForDocValues(SortedSet endUser return result; } - public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, IndexSettings indexSettings) { + /** + * Wrapper method that imposes a default regex limit. 
+ * See https://github.com/opensearch-project/OpenSearch/issues/2858 + */ + public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) { + return convertToOrdinalsFilter(format, DEFAULT_MAX_REGEX_LENGTH); + } + + public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format, int maxRegexLength) { if (isRegexBased()) { - return new AutomatonBackedOrdinalsFilter(toAutomaton(indexSettings)); + return new AutomatonBackedOrdinalsFilter(toAutomaton(maxRegexLength)); } if (isPartitionBased()) { return new PartitionedOrdinalsFilter(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java index c0a5c77a98170..ed70a9b310ea1 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/RareTermsAggregatorFactory.java @@ -34,7 +34,6 @@ import org.opensearch.common.ParseField; import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.index.IndexSettings; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.Aggregator; @@ -251,10 +250,10 @@ Aggregator create( double precision, CardinalityUpperBound cardinality ) throws IOException { - IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings(); + int maxRegexLength = context.getQueryShardContext().getIndexSettings().getMaxRegexLength(); final IncludeExclude.StringFilter filter = includeExclude == null ? null - : includeExclude.convertToStringFilter(format, indexSettings); + : includeExclude.convertToStringFilter(format, maxRegexLength); return new StringRareTermsAggregator( name, factories, diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java index 4b93121ae06ef..8935c9715a1e0 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTermsAggregatorFactory.java @@ -34,7 +34,6 @@ import org.opensearch.common.ParseField; import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.index.IndexSettings; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.DocValueFormat; @@ -326,10 +325,10 @@ Aggregator create( CardinalityUpperBound cardinality, Map metadata ) throws IOException { - IndexSettings indexSettings = aggregationContext.getQueryShardContext().getIndexSettings(); + int maxRegexLength = aggregationContext.getQueryShardContext().getIndexSettings().getMaxRegexLength(); final IncludeExclude.StringFilter filter = includeExclude == null ? 
null - : includeExclude.convertToStringFilter(format, indexSettings); + : includeExclude.convertToStringFilter(format, maxRegexLength); return new MapStringTermsAggregator( name, factories, @@ -367,10 +366,10 @@ Aggregator create( CardinalityUpperBound cardinality, Map metadata ) throws IOException { - IndexSettings indexSettings = aggregationContext.getQueryShardContext().getIndexSettings(); + int maxRegexLength = aggregationContext.getQueryShardContext().getIndexSettings().getMaxRegexLength(); final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null - : includeExclude.convertToOrdinalsFilter(format, indexSettings); + : includeExclude.convertToOrdinalsFilter(format, maxRegexLength); boolean remapGlobalOrd = true; if (cardinality == CardinalityUpperBound.ONE && factories == AggregatorFactories.EMPTY && includeExclude == null) { /* diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java index 992035f1fbe97..9085df1ccd749 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/SignificantTextAggregatorFactory.java @@ -44,7 +44,6 @@ import org.opensearch.common.util.BigArrays; import org.opensearch.common.util.BytesRefHash; import org.opensearch.common.util.ObjectArray; -import org.opensearch.index.IndexSettings; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryShardContext; @@ -138,10 +137,10 @@ protected Aggregator createInternal( // TODO - need to check with mapping that this is indeed a text field.... - IndexSettings indexSettings = searchContext.getQueryShardContext().getIndexSettings(); + int maxRegexLength = searchContext.getQueryShardContext().getIndexSettings().getMaxRegexLength(); IncludeExclude.StringFilter incExcFilter = includeExclude == null ? null - : includeExclude.convertToStringFilter(DocValueFormat.RAW, indexSettings); + : includeExclude.convertToStringFilter(DocValueFormat.RAW, maxRegexLength); MapStringTermsAggregator.CollectorSource collectorSource = new SignificantTextCollectorSource( queryShardContext.lookup().source(), diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java index 17b412f87107c..b320126dff24b 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java @@ -34,7 +34,6 @@ import org.apache.lucene.search.IndexSearcher; import org.opensearch.common.ParseField; -import org.opensearch.index.IndexSettings; import org.opensearch.index.query.QueryShardContext; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.AggregationExecutionException; @@ -381,10 +380,10 @@ Aggregator create( CardinalityUpperBound cardinality, Map metadata ) throws IOException { - IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings(); + int maxRegexLength = context.getQueryShardContext().getIndexSettings().getMaxRegexLength(); final IncludeExclude.StringFilter filter = includeExclude == null ? 
null - : includeExclude.convertToStringFilter(format, indexSettings); + : includeExclude.convertToStringFilter(format, maxRegexLength); return new MapStringTermsAggregator( name, factories, @@ -462,10 +461,10 @@ Aggregator create( ); } - IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings(); + int maxRegexLength = context.getQueryShardContext().getIndexSettings().getMaxRegexLength(); final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null - : includeExclude.convertToOrdinalsFilter(format, indexSettings); + : includeExclude.convertToOrdinalsFilter(format, maxRegexLength); boolean remapGlobalOrds; if (cardinality == CardinalityUpperBound.ONE && REMAP_GLOBAL_ORDS != null) { /* diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java index d104fc6783dc5..d0995abd07f32 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/IncludeExcludeTests.java @@ -36,16 +36,12 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongBitSet; -import org.opensearch.Version; -import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.ParseField; -import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.ToXContent; import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.IndexSettings; import org.opensearch.index.fielddata.AbstractSortedSetDocValues; import org.opensearch.search.DocValueFormat; import org.opensearch.search.aggregations.bucket.terms.IncludeExclude; @@ -58,23 +54,14 @@ public class IncludeExcludeTests extends OpenSearchTestCase { - private final IndexSettings dummyIndexSettings = new IndexSettings( - IndexMetadata.builder("index") - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1) - .numberOfReplicas(0) - .build(), - Settings.EMPTY - ); - public void testEmptyTermsWithOrds() throws IOException { IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null); - OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); LongBitSet acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); inexcl = new IncludeExclude(null, new TreeSet<>(Collections.singleton(new BytesRef("foo")))); - filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); + filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); acceptedOrds = filter.acceptedGlobalOrdinals(DocValues.emptySortedSet()); assertEquals(0, acceptedOrds.length()); } @@ -113,13 +100,13 @@ public long getValueCount() { }; IncludeExclude inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("foo"))), null); - OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings); + OrdinalsFilter filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW); LongBitSet acceptedOrds = 
filter.acceptedGlobalOrdinals(ords);
         assertEquals(1, acceptedOrds.length());
         assertTrue(acceptedOrds.get(0));

         inexcl = new IncludeExclude(new TreeSet<>(Collections.singleton(new BytesRef("bar"))), null);
-        filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
+        filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
         acceptedOrds = filter.acceptedGlobalOrdinals(ords);
         assertEquals(1, acceptedOrds.length());
         assertFalse(acceptedOrds.get(0));
@@ -128,7 +115,7 @@ public long getValueCount() {
             new TreeSet<>(Collections.singleton(new BytesRef("foo"))),
             new TreeSet<>(Collections.singleton(new BytesRef("foo")))
         );
-        filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
+        filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
         acceptedOrds = filter.acceptedGlobalOrdinals(ords);
         assertEquals(1, acceptedOrds.length());
         assertFalse(acceptedOrds.get(0));
@@ -137,7 +124,7 @@ public long getValueCount() {
             null, // means everything included
             new TreeSet<>(Collections.singleton(new BytesRef("foo")))
         );
-        filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW, dummyIndexSettings);
+        filter = inexcl.convertToOrdinalsFilter(DocValueFormat.RAW);
         acceptedOrds = filter.acceptedGlobalOrdinals(ords);
         assertEquals(1, acceptedOrds.length());
         assertFalse(acceptedOrds.get(0));

From 08e4a358399d5d931666fed8471ac00bcdec8a61 Mon Sep 17 00:00:00 2001
From: Tianli Feng
Date: Wed, 13 Apr 2022 11:13:37 -0700
Subject: [PATCH 04/41] Add request parameter 'cluster_manager_timeout' and
 deprecate 'master_timeout' - in Ingest APIs and Script APIs (#2682)

- Deprecate the request parameter `master_timeout` that is used in Ingest
  APIs and Script APIs which have the parameter.
- Add an alternative new request parameter `cluster_manager_timeout`.
- Add unit tests.

Signed-off-by: Tianli Feng
---
 .../rest-api-spec/api/delete_script.json      | 10 +++-
 .../rest-api-spec/api/get_script.json         | 10 +++-
 .../api/ingest.delete_pipeline.json           | 10 +++-
 .../api/ingest.get_pipeline.json              | 10 +++-
 .../api/ingest.put_pipeline.json              | 10 +++-
 .../rest-api-spec/api/put_script.json         | 10 +++-
 .../cluster/RestDeleteStoredScriptAction.java |  8 ++-
 .../cluster/RestGetStoredScriptAction.java    |  6 +-
 .../cluster/RestPutStoredScriptAction.java    |  6 +-
 .../ingest/RestDeletePipelineAction.java      |  6 +-
 .../action/ingest/RestGetPipelineAction.java  |  6 +-
 .../action/ingest/RestPutPipelineAction.java  |  6 +-
 .../RenamedTimeoutRequestParameterTests.java  | 59 +++++++++++++++++++
 13 files changed, 145 insertions(+), 12 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json
index b38b97ae57c2e..acaa389738606 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_script.json
@@ -28,7 +28,15 @@
       },
       "master_timeout":{
         "type":"time",
-        "description":"Specify timeout for connection to master"
+        "description":"Specify timeout for connection to master",
+        "deprecated":{
+          "version":"2.0.0",
+          "description":"To support inclusive language, use 'cluster_manager_timeout' instead."
+ } + }, + "cluster_manager_timeout":{ + "type":"time", + "description":"Specify timeout for connection to cluster-manager node" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json index 14307bea2ef0b..9cdac886b1b27 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json @@ -24,7 +24,15 @@ "params":{ "master_timeout":{ "type":"time", - "description":"Specify timeout for connection to master" + "description":"Specify timeout for connection to master", + "deprecated":{ + "version":"2.0.0", + "description":"To support inclusive language, use 'cluster_manager_timeout' instead." + } + }, + "cluster_manager_timeout":{ + "type":"time", + "description":"Specify timeout for connection to cluster-manager node" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json index 29b4219038cd2..3e40136f556fa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.delete_pipeline.json @@ -24,7 +24,15 @@ "params":{ "master_timeout":{ "type":"time", - "description":"Explicit operation timeout for connection to master node" + "description":"Explicit operation timeout for connection to master node", + "deprecated":{ + "version":"2.0.0", + "description":"To support inclusive language, use 'cluster_manager_timeout' instead." + } + }, + "cluster_manager_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to cluster-manager node" }, "timeout":{ "type":"time", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json index 65fc4f91b2b42..cde980e67c8c9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.get_pipeline.json @@ -30,7 +30,15 @@ "params":{ "master_timeout":{ "type":"time", - "description":"Explicit operation timeout for connection to master node" + "description":"Explicit operation timeout for connection to master node", + "deprecated":{ + "version":"2.0.0", + "description":"To support inclusive language, use 'cluster_manager_timeout' instead." + } + }, + "cluster_manager_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to cluster-manager node" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json index 4d2105866791c..5475905e7b99f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ingest.put_pipeline.json @@ -24,7 +24,15 @@ "params":{ "master_timeout":{ "type":"time", - "description":"Explicit operation timeout for connection to master node" + "description":"Explicit operation timeout for connection to master node", + "deprecated":{ + "version":"2.0.0", + "description":"To support inclusive language, use 'cluster_manager_timeout' instead." 
+ } + }, + "cluster_manager_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to cluster-manager node" }, "timeout":{ "type":"time", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json index 750f7fdf4eb62..c8413d1476402 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/put_script.json @@ -46,7 +46,15 @@ }, "master_timeout":{ "type":"time", - "description":"Specify timeout for connection to master" + "description":"Specify timeout for connection to master", + "deprecated":{ + "version":"2.0.0", + "description":"To support inclusive language, use 'cluster_manager_timeout' instead." + } + }, + "cluster_manager_timeout":{ + "type":"time", + "description":"Specify timeout for connection to cluster-manager node" }, "context":{ "type":"string", diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java index 8703899d5ed14..b303f769d216b 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestDeleteStoredScriptAction.java @@ -33,6 +33,7 @@ import org.opensearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.opensearch.client.node.NodeClient; +import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; @@ -45,6 +46,8 @@ public class RestDeleteStoredScriptAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeleteStoredScriptAction.class); + @Override public List routes() { return singletonList(new Route(DELETE, "/_scripts/{id}")); @@ -60,7 +63,10 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client String id = request.param("id"); DeleteStoredScriptRequest deleteStoredScriptRequest = new DeleteStoredScriptRequest(id); deleteStoredScriptRequest.timeout(request.paramAsTime("timeout", deleteStoredScriptRequest.timeout())); - deleteStoredScriptRequest.masterNodeTimeout(request.paramAsTime("master_timeout", deleteStoredScriptRequest.masterNodeTimeout())); + deleteStoredScriptRequest.masterNodeTimeout( + request.paramAsTime("cluster_manager_timeout", deleteStoredScriptRequest.masterNodeTimeout()) + ); + parseDeprecatedMasterTimeoutParameter(deleteStoredScriptRequest, request, deprecationLogger, getName()); return channel -> client.admin().cluster().deleteStoredScript(deleteStoredScriptRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetStoredScriptAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetStoredScriptAction.java index b75fb7693f865..5a904b99be469 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetStoredScriptAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestGetStoredScriptAction.java @@ -33,6 +33,7 @@ import org.opensearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.opensearch.client.node.NodeClient; +import 
org.opensearch.common.logging.DeprecationLogger; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestStatusToXContentListener; @@ -45,6 +46,8 @@ public class RestGetStoredScriptAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetStoredScriptAction.class); + @Override public List routes() { return singletonList(new Route(GET, "/_scripts/{id}")); @@ -59,7 +62,8 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient client) throws IOException { String id = request.param("id"); GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id); - getRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRequest.masterNodeTimeout())); + getRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", getRequest.masterNodeTimeout())); + parseDeprecatedMasterTimeoutParameter(getRequest, request, deprecationLogger, getName()); return channel -> client.admin().cluster().getStoredScript(getRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutStoredScriptAction.java b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutStoredScriptAction.java index f4fe21b8adbe0..1568a80278bb9 100644 --- a/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutStoredScriptAction.java +++ b/server/src/main/java/org/opensearch/rest/action/admin/cluster/RestPutStoredScriptAction.java @@ -34,6 +34,7 @@ import org.opensearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; +import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentType; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -50,6 +51,8 @@ public class RestPutStoredScriptAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutStoredScriptAction.class); + @Override public List routes() { return unmodifiableList( @@ -76,7 +79,8 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client StoredScriptSource source = StoredScriptSource.parse(content, xContentType); PutStoredScriptRequest putRequest = new PutStoredScriptRequest(id, context, content, request.getXContentType(), source); - putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout())); + putRequest.masterNodeTimeout(request.paramAsTime("cluster_manager_timeout", putRequest.masterNodeTimeout())); + parseDeprecatedMasterTimeoutParameter(putRequest, request, deprecationLogger, getName()); putRequest.timeout(request.paramAsTime("timeout", putRequest.timeout())); return channel -> client.admin().cluster().putStoredScript(putRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/opensearch/rest/action/ingest/RestDeletePipelineAction.java b/server/src/main/java/org/opensearch/rest/action/ingest/RestDeletePipelineAction.java index 179736b4b1816..69f9316bc3d9c 100644 --- a/server/src/main/java/org/opensearch/rest/action/ingest/RestDeletePipelineAction.java +++ b/server/src/main/java/org/opensearch/rest/action/ingest/RestDeletePipelineAction.java @@ -34,6 +34,7 @@ import org.opensearch.action.ingest.DeletePipelineRequest; 
import org.opensearch.client.node.NodeClient; +import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestToXContentListener; @@ -45,6 +46,8 @@ import static org.opensearch.rest.RestRequest.Method.DELETE; public class RestDeletePipelineAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestDeletePipelineAction.class); + @Override public List routes() { return singletonList(new Route(DELETE, "/_ingest/pipeline/{id}")); @@ -58,7 +61,8 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { DeletePipelineRequest request = new DeletePipelineRequest(restRequest.param("id")); - request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + request.masterNodeTimeout(restRequest.paramAsTime("cluster_manager_timeout", request.masterNodeTimeout())); + parseDeprecatedMasterTimeoutParameter(request, restRequest, deprecationLogger, getName()); request.timeout(restRequest.paramAsTime("timeout", request.timeout())); return channel -> client.admin().cluster().deletePipeline(request, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/opensearch/rest/action/ingest/RestGetPipelineAction.java b/server/src/main/java/org/opensearch/rest/action/ingest/RestGetPipelineAction.java index cf86541ca8cd9..5555bf53a5ee9 100644 --- a/server/src/main/java/org/opensearch/rest/action/ingest/RestGetPipelineAction.java +++ b/server/src/main/java/org/opensearch/rest/action/ingest/RestGetPipelineAction.java @@ -35,6 +35,7 @@ import org.opensearch.action.ingest.GetPipelineRequest; import org.opensearch.client.node.NodeClient; import org.opensearch.common.Strings; +import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; import org.opensearch.rest.action.RestStatusToXContentListener; @@ -48,6 +49,8 @@ public class RestGetPipelineAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestGetPipelineAction.class); + @Override public List routes() { return unmodifiableList(asList(new Route(GET, "/_ingest/pipeline"), new Route(GET, "/_ingest/pipeline/{id}"))); @@ -61,7 +64,8 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { GetPipelineRequest request = new GetPipelineRequest(Strings.splitStringByCommaToArray(restRequest.param("id"))); - request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + request.masterNodeTimeout(restRequest.paramAsTime("cluster_manager_timeout", request.masterNodeTimeout())); + parseDeprecatedMasterTimeoutParameter(request, restRequest, deprecationLogger, getName()); return channel -> client.admin().cluster().getPipeline(request, new RestStatusToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/opensearch/rest/action/ingest/RestPutPipelineAction.java b/server/src/main/java/org/opensearch/rest/action/ingest/RestPutPipelineAction.java index 09f40c962dda7..8a9abc860fbc9 100644 --- a/server/src/main/java/org/opensearch/rest/action/ingest/RestPutPipelineAction.java +++ b/server/src/main/java/org/opensearch/rest/action/ingest/RestPutPipelineAction.java @@ -36,6 +36,7 @@ 
import org.opensearch.client.node.NodeClient; import org.opensearch.common.bytes.BytesReference; import org.opensearch.common.collect.Tuple; +import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.xcontent.XContentType; import org.opensearch.rest.BaseRestHandler; import org.opensearch.rest.RestRequest; @@ -49,6 +50,8 @@ public class RestPutPipelineAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestPutPipelineAction.class); + @Override public List routes() { return singletonList(new Route(PUT, "/_ingest/pipeline/{id}")); @@ -63,7 +66,8 @@ public String getName() { public RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { Tuple sourceTuple = restRequest.contentOrSourceParam(); PutPipelineRequest request = new PutPipelineRequest(restRequest.param("id"), sourceTuple.v2(), sourceTuple.v1()); - request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); + request.masterNodeTimeout(restRequest.paramAsTime("cluster_manager_timeout", request.masterNodeTimeout())); + parseDeprecatedMasterTimeoutParameter(request, restRequest, deprecationLogger, getName()); request.timeout(restRequest.paramAsTime("timeout", request.timeout())); return channel -> client.admin().cluster().putPipeline(request, new RestToXContentListener<>(channel)); } diff --git a/server/src/test/java/org/opensearch/action/RenamedTimeoutRequestParameterTests.java b/server/src/test/java/org/opensearch/action/RenamedTimeoutRequestParameterTests.java index 86529d96573f8..648766681a377 100644 --- a/server/src/test/java/org/opensearch/action/RenamedTimeoutRequestParameterTests.java +++ b/server/src/test/java/org/opensearch/action/RenamedTimeoutRequestParameterTests.java @@ -63,6 +63,9 @@ import org.opensearch.rest.action.admin.cluster.RestRestoreSnapshotAction; import org.opensearch.rest.action.admin.cluster.RestSnapshotsStatusAction; import org.opensearch.rest.action.admin.cluster.RestVerifyRepositoryAction; +import org.opensearch.rest.action.admin.cluster.RestDeleteStoredScriptAction; +import org.opensearch.rest.action.admin.cluster.RestGetStoredScriptAction; +import org.opensearch.rest.action.admin.cluster.RestPutStoredScriptAction; import org.opensearch.rest.action.cat.RestAllocationAction; import org.opensearch.rest.action.cat.RestRepositoriesAction; import org.opensearch.rest.action.cat.RestThreadPoolAction; @@ -76,6 +79,9 @@ import org.opensearch.rest.action.cat.RestPendingClusterTasksAction; import org.opensearch.rest.action.cat.RestSegmentsAction; import org.opensearch.rest.action.cat.RestSnapshotAction; +import org.opensearch.rest.action.ingest.RestDeletePipelineAction; +import org.opensearch.rest.action.ingest.RestGetPipelineAction; +import org.opensearch.rest.action.ingest.RestPutPipelineAction; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.rest.FakeRestRequest; import org.opensearch.threadpool.TestThreadPool; @@ -612,6 +618,59 @@ public void testVerifyRepository() { assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE); } + public void testDeletePipeline() { + FakeRestRequest request = new FakeRestRequest(); + request.params().put("cluster_manager_timeout", "1h"); + request.params().put("master_timeout", "3s"); + request.params().put("id", "test"); + RestDeletePipelineAction action = new RestDeletePipelineAction(); + Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(request, 
client)); + assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE)); + assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE); + } + + public void testGetPipeline() { + RestGetPipelineAction action = new RestGetPipelineAction(); + Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client)); + assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE)); + assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE); + } + + public void testPutPipeline() { + FakeRestRequest request = getFakeRestRequestWithBody(); + request.params().put("cluster_manager_timeout", "2m"); + request.params().put("master_timeout", "3s"); + request.params().put("id", "test"); + RestPutPipelineAction action = new RestPutPipelineAction(); + Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(request, client)); + assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE)); + assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE); + } + + public void testDeleteStoredScript() { + RestDeleteStoredScriptAction action = new RestDeleteStoredScriptAction(); + Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client)); + assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE)); + assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE); + } + + public void testGetStoredScript() { + RestGetStoredScriptAction action = new RestGetStoredScriptAction(); + Exception e = assertThrows(OpenSearchParseException.class, () -> action.prepareRequest(getRestRequestWithBothParams(), client)); + assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE)); + assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE); + } + + public void testPutStoredScript() { + RestPutStoredScriptAction action = new RestPutStoredScriptAction(); + Exception e = assertThrows( + OpenSearchParseException.class, + () -> action.prepareRequest(getRestRequestWithBodyWithBothParams(), client) + ); + assertThat(e.getMessage(), containsString(DUPLICATE_PARAMETER_ERROR_MESSAGE)); + assertWarnings(MASTER_TIMEOUT_DEPRECATED_MESSAGE, "empty templates should no longer be used"); + } + private MasterNodeRequest getMasterNodeRequest() { return new MasterNodeRequest() { @Override From 3c5d997a765e24ffa32d35219fd5026cfb143a9d Mon Sep 17 00:00:00 2001 From: Owais Kazi Date: Wed, 13 Apr 2022 11:14:18 -0700 Subject: [PATCH 05/41] Added a new line linter (#2875) * Added linter to add new line Signed-off-by: Owais Kazi * Fixed new lines Signed-off-by: Owais Kazi * Ignore empty files Signed-off-by: Owais Kazi * Updated DEVELOPER GUIDE Signed-off-by: Owais Kazi * Renamed workflow file Signed-off-by: Owais Kazi * Fixed failing tests Signed-off-by: Owais Kazi --- .ci/dockerOnLinuxExclusions | 2 +- .github/workflows/code-hygiene.yml | 14 +++++ .github/workflows/delete_backport_branch.yml | 2 +- .github/workflows/links.yml | 2 +- .github/workflows/wrapper.yml | 2 +- .linelint.yml | 49 ++++++++++++++++++ DEVELOPER_GUIDE.md | 16 ++++++ README.md | 2 +- RELEASING.md | 2 +- SECURITY.md | 2 +- .../org.eclipse.core.resources.prefs | 2 +- .../src/main/resources/minimumGradleVersion | 2 +- .../testKit/testingConventions/build.gradle | 3 -- .../testingConventions/settings.gradle | 2 +- .../testKit/thirdPartyAudit/settings.gradle | 2 +- .../prepare_release_update_documentation.py | 1 - dev-tools/signoff-check.sh | 1 - .../resources/rest-api-spec/test/10_info.yml | 1 - 
.../resources/rest-api-spec/test/11_nodes.yml | 1 - distribution/src/bin/opensearch-env-from-file | 1 - distribution/src/bin/opensearch-env.bat | 1 - doc-tools/build.gradle | 1 - .../missingdoclet/MissingDoclet.class | Bin 0 -> 14156 bytes libs/dissect/build.gradle | 1 - .../test/resources/specification/tests.json | 2 +- libs/geo/build.gradle | 1 - libs/grok/src/main/resources/patterns/exim | 1 - libs/grok/src/main/resources/patterns/junos | 1 - .../src/main/resources/patterns/postgresql | 1 - libs/ssl-config/build.gradle | 1 - .../analysis/common/cjk_analysis.json | 2 +- .../analysis/common/pattern_capture.json | 2 +- .../analysis-common/60_analysis_scripting.yml | 1 - .../test/cluster.stats/10_analysis_stats.yml | 1 - .../test/indices.analyze/10_synonyms.yml | 1 - .../test/search.query/60_synonym_graph.yml | 1 - .../test/search.query/70_intervals.yml | 1 - .../test/ingest/220_drop_processor.yml | 1 - modules/ingest-user-agent/build.gradle | 1 - .../src/test/test-regexes.yml | 2 +- modules/lang-expression/build.gradle | 1 - .../src/main/antlr/PainlessLexer.g4 | 2 +- .../test/painless/100_terms_agg.yml | 1 - modules/opensearch-dashboards/build.gradle | 1 - modules/repository-url/build.gradle | 1 - modules/systemd/build.gradle | 1 - .../index/analysis/KeywordTokenizer.rbbi | 2 +- plugins/analysis-kuromoji/build.gradle | 1 - .../test/analysis_phonetic/10_metaphone.yml | 1 - .../analysis_phonetic/20_double_metaphone.yml | 1 - .../analysis_phonetic/30_beider_morse.yml | 1 - .../analysis_phonetic/50_daitch_mokotoff.yml | 1 - plugins/analysis-smartcn/build.gradle | 1 - .../attributes/google-compute-default-zone | 2 +- .../gce/computeMetadata/v1/project/project-id | 2 +- plugins/examples/build.gradle | 1 - plugins/examples/custom-settings/build.gradle | 1 - .../src/main/config/custom.yml | 2 +- plugins/examples/rest-handler/build.gradle | 1 - .../script-expert-scoring/build.gradle | 1 - .../test/sample-files/asciidoc.asciidoc | 1 - .../test/ingest_attachment/10_basic.yml | 1 - .../test/mapper_annotatedtext/10_basic.yml | 1 - .../test/hdfs_repository/30_snapshot.yml | 1 - .../secure_hdfs_repository/30_snapshot.yml | 1 - plugins/transport-nio/build.gradle | 1 - qa/die-with-dignity/build.gradle | 1 - .../org/opensearch/common/cli/tool-cmd1.help | 2 +- .../org/opensearch/common/cli/tool.help | 2 +- .../test/multi_cluster/70_skip_shards.yml | 1 - .../test/old_cluster/10_basic.yml | 1 - .../test/old_cluster/20_date_range.yml | 2 - .../test/upgraded_cluster/10_basic.yml | 2 - .../test/upgraded_cluster/20_date_range.yml | 1 - .../ingest_mustache/10_ingest_disabled.yml | 1 - .../50_script_processor_using_painless.yml | 1 - .../60_pipeline_timestamp_date_mapping.yml | 1 - .../test/resources/scripts/master.painless | 2 +- ...SmokeTestPluginsClientYamlTestSuiteIT.java | 1 - .../opensearch.release-notes-1.0.0-rc1.md | 2 - .../opensearch.release-notes-1.1.0.md | 2 - .../opensearch.release-notes-1.2.0.md | 1 - .../opensearch.release-notes-1.2.4.md | 2 - .../opensearch.release-notes-1.3.0.md | 2 - .../rest-api-spec/test/bulk/10_basic.yml | 1 - .../test/bulk/20_list_of_strings.yml | 1 - .../rest-api-spec/test/bulk/30_big_string.yml | 1 - .../rest-api-spec/test/bulk/40_source.yml | 1 - .../rest-api-spec/test/cat.nodes/10_basic.yml | 1 - .../test/cluster.pending_tasks/10_basic.yml | 1 - .../test/cluster.remote_info/10_info.yml | 1 - .../10_basic.yml | 1 - .../rest-api-spec/test/create/40_routing.yml | 1 - .../rest-api-spec/test/delete/30_routing.yml | 1 - .../test/get/15_default_values.yml | 1 - 
.../test/get/20_stored_fields.yml | 2 - .../rest-api-spec/test/get/40_routing.yml | 1 - .../rest-api-spec/test/get/90_versions.yml | 1 - .../rest-api-spec/test/index/40_routing.yml | 1 - .../indices.delete_alias/all_path_options.yml | 1 - .../test/indices.exists_template/10_basic.yml | 1 - .../test/indices.forcemerge/10_basic.yml | 2 - .../40_missing_index.yml | 2 - .../50_field_wildcards.yml | 1 - .../20_get_missing.yml | 1 - .../indices.get_mapping/30_missing_index.yml | 1 - .../test/indices.get_settings/20_aliases.yml | 1 - .../indices.get_template/20_get_missing.yml | 1 - .../test/indices.open/20_multiple_indices.yml | 1 - .../indices.put_alias/all_path_options.yml | 1 - .../test/indices.put_mapping/10_basic.yml | 1 - .../indices.put_mapping/all_path_options.yml | 1 - .../indices.put_settings/all_path_options.yml | 1 - .../test/indices.refresh/10_basic.yml | 1 - .../test/indices.rollover/10_basic.yml | 1 - .../indices.rollover/20_max_doc_condition.yml | 1 - .../indices.update_aliases/20_routing.yml | 1 - .../test/indices.upgrade/10_basic.yml | 1 - .../test/info/20_lucene_version.yml | 3 -- .../test/nodes.stats/11_indices_metrics.yml | 1 - .../rest-api-spec/test/ping/10_ping.yml | 1 - .../search.aggregation/240_max_buckets.yml | 1 - .../test/search.aggregation/250_moving_fn.yml | 1 - .../350_variable_width_histogram.yml | 1 - .../test/search/10_source_filtering.yml | 1 - .../search/140_pre_filter_search_shards.yml | 1 - .../test/search/230_interval_query.yml | 2 - .../test/search/240_date_nanos.yml | 1 - .../test/search/90_search_after.yml | 1 - .../test/update/20_doc_upsert.yml | 2 - .../test/update/22_doc_as_upsert.yml | 2 - .../test/update/35_if_seq_no.yml | 1 - .../rest-api-spec/test/update/40_routing.yml | 1 - .../test/update/85_fields_meta.yml | 2 - .../plugins/concurrent-search/build.gradle | 2 +- .../search/aggregations/bucket/package-info | 1 - .../src/test/resources/config/opensearch.yml | 1 - .../hunspell/en_US_custom/settings.yml | 2 +- .../action/admin/invalid.txt.keystore | 1 - .../fieldstats-index-constraints-request.json | 2 +- .../index/analysis/shingle_analysis.json | 2 +- .../index/analysis/shingle_analysis2.json | 2 +- .../org/opensearch/index/analysis/stop.json | 2 +- .../genericstore/test-data.json | 2 +- .../dynamictemplate/pathmatch/test-data.json | 2 +- .../dynamictemplate/simple/test-data.json | 2 +- .../mapper/multifield/merge/test-data.json | 2 +- .../index/mapper/multifield/test-data.json | 2 +- settings.gradle | 1 - .../src/main/resources/provision/addprinc.sh | 2 +- .../resources/provision/krb5.conf.template | 1 - test/fixtures/minio-fixture/build.gradle | 1 - .../org.mockito.plugins.MockMaker | 2 +- .../test/suite1/20_another_test.yml | 1 - 154 files changed, 115 insertions(+), 167 deletions(-) create mode 100644 .github/workflows/code-hygiene.yml create mode 100644 .linelint.yml create mode 100644 doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class diff --git a/.ci/dockerOnLinuxExclusions b/.ci/dockerOnLinuxExclusions index 8061248a87df4..dd518c7043e2f 100644 --- a/.ci/dockerOnLinuxExclusions +++ b/.ci/dockerOnLinuxExclusions @@ -12,4 +12,4 @@ ol-7.7 sles-12.3 # older version used in Vagrant image sles-12.5 sles-15.1 -sles-15.2 \ No newline at end of file +sles-15.2 diff --git a/.github/workflows/code-hygiene.yml b/.github/workflows/code-hygiene.yml new file mode 100644 index 0000000000000..1952630e5bdfa --- /dev/null +++ b/.github/workflows/code-hygiene.yml @@ -0,0 +1,14 @@ +name: Code Hygiene + +on: [push, pull_request] + 
+jobs: + linelint: + runs-on: ubuntu-latest + name: Check if all files end in newline + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Linelint + uses: fernandrone/linelint@0.0.4 diff --git a/.github/workflows/delete_backport_branch.yml b/.github/workflows/delete_backport_branch.yml index d654df6b40257..387a124b8cb6a 100644 --- a/.github/workflows/delete_backport_branch.yml +++ b/.github/workflows/delete_backport_branch.yml @@ -12,4 +12,4 @@ jobs: - name: Delete merged branch uses: SvanBoxel/delete-merged-branch@main env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 21fb7ab9086ee..ca05aee8be378 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -16,4 +16,4 @@ jobs: args: --accept=200,403,429 --exclude-mail **/*.html **/*.md **/*.txt **/*.json --exclude-file .lychee.excludes fail: true env: - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} \ No newline at end of file + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.github/workflows/wrapper.yml b/.github/workflows/wrapper.yml index d577699b66dc0..be5e7afb56ba0 100644 --- a/.github/workflows/wrapper.yml +++ b/.github/workflows/wrapper.yml @@ -7,4 +7,4 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - - uses: gradle/wrapper-validation-action@v1 \ No newline at end of file + - uses: gradle/wrapper-validation-action@v1 diff --git a/.linelint.yml b/.linelint.yml new file mode 100644 index 0000000000000..7b7bc162eef28 --- /dev/null +++ b/.linelint.yml @@ -0,0 +1,49 @@ +# 'true' will fix files +autofix: true + +ignore: + - .git/ + - .gradle/ + - .idea/ + - '*.sha1' + - '*.txt' + - '.github/CODEOWNERS' + - 'buildSrc/src/testKit/opensearch.build/LICENSE' + - 'buildSrc/src/testKit/opensearch.build/NOTICE' + - 'server/licenses/apache-log4j-extras-DEPENDENCIES' + # Empty files + - 'doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class' + - 'buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/build.gradle' + - 'buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/oss-darwin-tar/build.gradle' + - 'buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix/build.gradle' + - 'buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/minor/build.gradle' + - 'buildSrc/src/main/resources/buildSrc.marker' + - 'buildSrc/src/testKit/opensearch-build-resources/settings.gradle' + - 'buildSrc/src/testKit/opensearch.build/settings.gradle' + - 'buildSrc/src/testKit/reaper/settings.gradle' + - 'buildSrc/src/testKit/symbolic-link-preserving-tar/settings.gradle' + - 'buildSrc/src/testKit/testingConventions/empty_test_task/.gitignore' + - 'client/rest-high-level/src/main/resources/META-INF/services/org.opensearch.plugins.spi.NamedXContentProvider' + - 'distribution/bwc/bugfix/build.gradle' + - 'distribution/bwc/maintenance/build.gradle' + - 'distribution/bwc/minor/build.gradle' + - 'distribution/bwc/staged/build.gradle' + - 'libs/ssl-config/src/test/resources/certs/pem-utils/empty.pem' + - 'qa/evil-tests/src/test/resources/org/opensearch/common/logging/does_not_exist/nothing_to_see_here' + - 'qa/os/centos-6/build.gradle' + - 'qa/os/debian-8/build.gradle' + - 'qa/os/oel-6/build.gradle' + - 'qa/os/oel-7/build.gradle' + - 'qa/os/sles-12/build.gradle' + # Test requires no new line 
for these files + - 'server/src/test/resources/org/opensearch/action/bulk/simple-bulk11.json' + - 'server/src/test/resources/org/opensearch/action/search/simple-msearch5.json' + +rules: + # checks if file ends in a newline character + end-of-file: + # set to true to enable this rule + enable: true + + # if true also checks if file ends in a single newline character + single-new-line: true diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 9b1bc933eb1e3..11fcb324c8cae 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -48,6 +48,7 @@ - [Distributed Framework](#distributed-framework) - [Submitting Changes](#submitting-changes) - [Backports](#backports) + - [LineLint](#linelint) # Developer Guide @@ -472,3 +473,18 @@ See [CONTRIBUTING](CONTRIBUTING.md). ## Backports The Github workflow in [`backport.yml`](.github/workflows/backport.yml) creates backport PRs automatically when the original PR with an appropriate label `backport ` is merged to main with the backport workflow run successfully on the PR. For example, if a PR on main needs to be backported to `1.x` branch, add a label `backport 1.x` to the PR and make sure the backport workflow runs on the PR along with other checks. Once this PR is merged to main, the workflow will create a backport PR to the `1.x` branch. + +## LineLint +A linter in [`code-hygiene.yml`](.github/workflows/code-hygiene.yml) that validates simple newline and whitespace rules in all sorts of files. It can: +- Recursively check a directory tree for files that do not end in a newline +- Automatically fix these files by adding a newline or trimming extra newlines. + +Rules are defined in `.linelint.yml`. + +Executing the binary will automatically search the local directory tree for linting errors. + + linelint . + +Pass a list of files or directories to limit your search. + + linelint README.md LICENSE diff --git a/README.md b/README.md index 6a9a2a69d7367..bb1def63340fe 100644 --- a/README.md +++ b/README.md @@ -45,4 +45,4 @@ Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. OpenSearch is a registered trademark of Amazon Web Services. -OpenSearch includes certain Apache-licensed Elasticsearch code from Elasticsearch B.V. and other source code. Elasticsearch B.V. is not the source of that other source code. ELASTICSEARCH is a registered trademark of Elasticsearch B.V. \ No newline at end of file +OpenSearch includes certain Apache-licensed Elasticsearch code from Elasticsearch B.V. and other source code. Elasticsearch B.V. is not the source of that other source code. ELASTICSEARCH is a registered trademark of Elasticsearch B.V. diff --git a/RELEASING.md b/RELEASING.md index 50bb965b8d551..1ef59446f6e31 100644 --- a/RELEASING.md +++ b/RELEASING.md @@ -1,3 +1,3 @@ ## Releasing -This project follows [OpenSearch project branching, labelling, and releasing](https://github.com/opensearch-project/.github/blob/main/RELEASING.md). \ No newline at end of file +This project follows [OpenSearch project branching, labelling, and releasing](https://github.com/opensearch-project/.github/blob/main/RELEASING.md). diff --git a/SECURITY.md b/SECURITY.md index 0b85ca04ed260..b86292104335f 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,3 +1,3 @@ ## Reporting a Vulnerability -If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/) or directly via email to aws-security@amazon.com. 
Please do **not** create a public GitHub issue. \ No newline at end of file +If you discover a potential security issue in this project we ask that you notify AWS/Amazon Security via our [vulnerability reporting page](http://aws.amazon.com/security/vulnerability-reporting/) or directly via email to aws-security@amazon.com. Please do **not** create a public GitHub issue. diff --git a/buildSrc/src/main/resources/eclipse.settings/org.eclipse.core.resources.prefs b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.core.resources.prefs index 6fd0a9aab1327..29abf99956411 100644 --- a/buildSrc/src/main/resources/eclipse.settings/org.eclipse.core.resources.prefs +++ b/buildSrc/src/main/resources/eclipse.settings/org.eclipse.core.resources.prefs @@ -3,4 +3,4 @@ encoding//src/main/java=UTF-8 encoding//src/main/resources=UTF-8 encoding//src/test/java=UTF-8 encoding//src/test/resources=UTF-8 -encoding/=UTF-8 \ No newline at end of file +encoding/=UTF-8 diff --git a/buildSrc/src/main/resources/minimumGradleVersion b/buildSrc/src/main/resources/minimumGradleVersion index 6b0e58e78f5ee..815da58b7a9ed 100644 --- a/buildSrc/src/main/resources/minimumGradleVersion +++ b/buildSrc/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -7.4.1 \ No newline at end of file +7.4.1 diff --git a/buildSrc/src/testKit/testingConventions/build.gradle b/buildSrc/src/testKit/testingConventions/build.gradle index 418e833e8cb14..676960bcc8b70 100644 --- a/buildSrc/src/testKit/testingConventions/build.gradle +++ b/buildSrc/src/testKit/testingConventions/build.gradle @@ -88,6 +88,3 @@ project(':valid_setup_with_base') { } } } - - - diff --git a/buildSrc/src/testKit/testingConventions/settings.gradle b/buildSrc/src/testKit/testingConventions/settings.gradle index c4206edd63ff7..bb64f39e020c5 100644 --- a/buildSrc/src/testKit/testingConventions/settings.gradle +++ b/buildSrc/src/testKit/testingConventions/settings.gradle @@ -16,4 +16,4 @@ include 'all_classes_in_tasks' include 'not_implementing_base' include 'valid_setup_no_base' include 'valid_setup_with_base' -include 'tests_in_main' \ No newline at end of file +include 'tests_in_main' diff --git a/buildSrc/src/testKit/thirdPartyAudit/settings.gradle b/buildSrc/src/testKit/thirdPartyAudit/settings.gradle index 582faadddaef1..603d8b7da6e5d 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/settings.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/settings.gradle @@ -9,4 +9,4 @@ * GitHub history for details. 
*/ -include 'sample_jars' \ No newline at end of file +include 'sample_jars' diff --git a/dev-tools/prepare_release_update_documentation.py b/dev-tools/prepare_release_update_documentation.py index c7eae4eeb3245..d4edbb110f278 100644 --- a/dev-tools/prepare_release_update_documentation.py +++ b/dev-tools/prepare_release_update_documentation.py @@ -136,4 +136,3 @@ def callback(line): print('WARNING: no documentation references updates for release %s' % (release_version)) print('*** Done.') - diff --git a/dev-tools/signoff-check.sh b/dev-tools/signoff-check.sh index 56cb49455165e..5fe00c430ca79 100755 --- a/dev-tools/signoff-check.sh +++ b/dev-tools/signoff-check.sh @@ -28,4 +28,3 @@ done # Return non-zero error code if any commits were missing signoff exit $missingSignoff - diff --git a/distribution/docker/src/test/resources/rest-api-spec/test/10_info.yml b/distribution/docker/src/test/resources/rest-api-spec/test/10_info.yml index 2b0f6683a24cf..97b3b7b5d0f4d 100644 --- a/distribution/docker/src/test/resources/rest-api-spec/test/10_info.yml +++ b/distribution/docker/src/test/resources/rest-api-spec/test/10_info.yml @@ -7,4 +7,3 @@ - is_true: version - is_true: version.number - match: { version.build_type: "docker" } - diff --git a/distribution/docker/src/test/resources/rest-api-spec/test/11_nodes.yml b/distribution/docker/src/test/resources/rest-api-spec/test/11_nodes.yml index 95ea022696942..a6b78645087f4 100644 --- a/distribution/docker/src/test/resources/rest-api-spec/test/11_nodes.yml +++ b/distribution/docker/src/test/resources/rest-api-spec/test/11_nodes.yml @@ -123,4 +123,3 @@ - match: $body: | /^(\S{5,}\n)+$/ - diff --git a/distribution/src/bin/opensearch-env-from-file b/distribution/src/bin/opensearch-env-from-file index 73cd11123bfff..be5b428c268c8 100644 --- a/distribution/src/bin/opensearch-env-from-file +++ b/distribution/src/bin/opensearch-env-from-file @@ -47,4 +47,3 @@ for VAR_NAME_FILE in OPENSEARCH_PASSWORD_FILE KEYSTORE_PASSWORD_FILE ; do unset "$VAR_NAME_FILE" fi done - diff --git a/distribution/src/bin/opensearch-env.bat b/distribution/src/bin/opensearch-env.bat index bc8a6ce53a5f5..96770f72f35c8 100644 --- a/distribution/src/bin/opensearch-env.bat +++ b/distribution/src/bin/opensearch-env.bat @@ -74,4 +74,3 @@ if defined JAVA_OPTS ( rem check the Java version %JAVA% -cp "%OPENSEARCH_CLASSPATH%" "org.opensearch.tools.java_version_checker.JavaVersionChecker" || exit /b 1 - diff --git a/doc-tools/build.gradle b/doc-tools/build.gradle index 98b2149cb59a9..c47097c3d6035 100644 --- a/doc-tools/build.gradle +++ b/doc-tools/build.gradle @@ -8,4 +8,3 @@ version '1.0.0-SNAPSHOT' repositories { mavenCentral() } - diff --git a/doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class b/doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class new file mode 100644 index 0000000000000000000000000000000000000000..8729def18e4b3eda7184b1e3e49f1bf055d152db GIT binary patch literal 14156 zcmbta34B!5)j#KEd6UTlhOlT*aeyetJXqnwXLEAqD9eG+uHWi?weigVi&vE(nYJ4?|<%lZKq+q?oGXiS2dq0V|fYLW!QhZ13X7?aV_*tEX!q827@Wt!3yk0mi8wJ{VOvnnHFkALg_(-mTe{ZHS zRvL5x_&zff!%Vu6Rsr9kaJVrFW-hPH!cGQ}4ON#Iv>NA}{4pz*c&U+^gqGhVYBfVEL#m#T_Ss2VNw~*D5WcDt3e^Aipk71sfR$kQZT%$si2v_ z=o9KP*>0OjGijD=*K1OQu7XHn5l2-xDsH|bF%u{`St@GMY&uDnikTD_g&+dh8taJ; zhOMxx4~6A+n3N#MvnK+59!kZb(`V;MWvkQPP)dUYQw3dKmcg&vYtk;-4VuQRU7+bq zcZF=$HRzo{giOr=-cgBl4p3e|e~n51MgI+Z)6;A9^g;94EHcrw1`(h^y3VBQ>D^%3 z-cT~2L4i8G1a)T@rumvzCtL~|1BSGxuRS#2=)6e*&{|3#pqmW35r%8bDYfNt@ov%w 
z>1J3jki;<^!EH?KlR4%1&^s$Rdw^OWrjHnO3)8|ez&OaL%bjA{d z7O}#CP(K7}UOuLC7m*>@?RL7uppU^J7&`l|E(CX>=bLv$E<^5NHqWH|YQkfg^BC z+x%8ebUFu}GP1!-pP>f~`m88||AUKVyh#tz=R|)d!jV`AmIkcYYS0(p3ynLyJsuv6 zTF$1W^m%&Nq=UlHC6VNYSPU2p%c&kU>5vGBsRg7d-Y-s&bQ~5R;eB$>5tAOH$8EW9 zf!xQF8My}zT>;g6*%nErK*FQ6!=NYN)o4VfdrENZV3?7e zBgwAau~e^>ioly3>~Gg@*CZ7K?Dz{zCynNg)q{~}*h+Zl+f1jn`orDR0E-;EmaPIEM@%=_ zvmGcJJoFQw#J}50Etb`ODy#i$)FSSFWBA`)L}uP0czemDU(&Czpp{67Mn88Fs%6Zo zP^nDbvIdV$vT)bc?MWP$WqY z{TX7QyMx<=H(xdBuk^QU-h@N#>;mDMggj~v`@2d15WXoCz7bH<0Eyvy&7{|bcf66L zYxlhLhG?lbb99C6Sb6C!z(^;+3lrIClbI12O~>(J+#SJ_XoP9~Byc>&2?)n~QkX3< zP1a%!rCi9x1{ZdMwB^mzeC;CNYw1k(dDd!KTSm z1UEBbCR<~@RszO9ybh9$<&z$sh8__Ysrf?*%kRXZvfOl&%XkKq8TJx+XwBwn$4NDK z7Hr`7kPE`VCw!90Co>`s1My@u)B_~Wu54<`UeIBA8GMS#r;6<;MEKQT!gJYg@aZlk zZDpnXKa=MXEPi2UFbExTH!m=`g3o{+O2ymaS6hjuP!b!R=}2eR#D;M;F7dELTU#Nu zecF8iTKqViqjS01|44mz%L2+XiDgB(3W}ydCKYrQjy;`MlC##ALQQ*4WQk*5nI$ z6^wo;w!1x)>gg5dW8yX8s`F}YHn<7!%I>@BQt@~+S=Ss1A=XdAWK?K#%q_eIpaz27 zEjn*8{?G;wuSEdoh=5;*(*CW3DgUbAPH5Cf*z$+1o+uIGE{o)a8G(?7QX33Ap<&s%4A)(@N8~s|RT~ZNKt5^w zsk^M6!IXsbj?OKEO6Wu!z+FtSw-vTd0N?0Ma>PrA4FCWfu>oEO!_wG6g+8g<>*b62 z5`#A*9W&-Insprb*F>TaIFm2s%it;5IT`4+jFjiB>H=bmzIJ^$NUs{aWg^F}8{w|A z9sGO_nY@*Iuv^^W2#EW2E-mwsYYaNx4(i4qn*|nf10IHcSU#3-ZIt-au)?>A!<$NE zaW9-n7z3BTaTbW+Xy3}oe5y2SX5Wnsw zMG&h!dnR-=vyaTQBG1Zpwr;{0PqKHj(-5wSCmb`_90yS^n23(@9+U#t4-OunAq4KnwKK;{*B_fJn4e|s;#hu>%L zdma65AHadoN!Y=Fnf!kK0FDbY51EGlkptl9;Tpc1O#UDv5I1`4{3+8}*)WcolkFfl zu1*NlbX&N zcG2%KgFLm_`ZCJm7I|#@8<*X9-3QltI^ql>}8%S zft(o)2Hy`D^26Hf82N6Tuat-QvnD?vaZ5=k>6!wN@+1fcx><$mVgacI7`@Q@t{*A%Eb}w!8{LVqp z3kgAfnSTo!!;4BH7=dH%s_pDi6&vmD1~31fUorR(V1J>8?NB&Nxr0ppqt3f+v-Dja zGEDqulZxmmi4$Hm`L7ZN6t#4A20O)NKQ12m-_!RH<3ta6{cOP9ZzYqs`a^iRZ7>>b zcWaJt=D`%Q^ZdFbCFcT3{Dz6t3Gxx(>`TWdnZsAMF2t<2Og;Wdp-dgW=R@=5E|wqR3ZTh69srkWvL z#qGi74K^IJOf_4Yih|v1TO=!}PBsav%GVrIouY?#wloDhB|)f8Gu2#a^4U#D5w>>( z11$j!kKa`Dq;q;_OIL71Cm_u}@d8s-NN*`l(cRjVKE;`)S}5&P1#er@M>kXz)0_-z z1>#+UJ-urpRy5oqVX{GUVP2x4{;lCq#aT>?ZCS}HXrn`H_~)f+u{z68bs+e-%4GC3 zQ!P#lk^;;yiGu~&_wP~(HpcI0m54`L)Rhw!DYa_aIn~*Zk*51xcJGfel zw;@q0S7cd4l^RI4!V5}!gM$<6cWQ#n3+_Kr@I zPhBAkhD_C@nlbrGpK3Ij5QP|LaRRJyv zAcfa%$?K0K%_GP?-u##}2Zpm4HA9{Rkq7mXthyKV(yY3SpORIdidlHGf?tx`dR^j1 zU6;7()+O$#b&1y~*k=yD6`g`#yh+K_ee1+xqIEjjPA5Np=b_e#v5fNM>ccd@y7&-P zR3D);4^gdMEPfb$iq69CY@CB=ITg}MoS+HNG-Kc8v;?iDy#_6%WvH<{9>FXdlI41z zyD(3oZ}sDJ&gR3&In-do^G2va(WaWmsc|!^O(V3%uC9}6Te{jV)u2;-2&)umh~{X# ztOZV6fsu7IA2;Yra6P(;j%cW6+dDc?d8nQ`G)CUxKy0^k0$!H)5BRk)QeacAy7mxl zsII9!Oq()Tn5z4Z0vAt!Zcplc{0^n<(+f5>(q~&@%ixx0aLQ+K>aWnbC|A+fvY=e;U<3Qh%M8G@6tixi_W-K*V!fH9PEaKk8iWgx zE3xb#(Ny%|PSk<_I@E--tEwN>puS%L)DH=~w;`5B*YDET2g!-8H z0@(IjAlA33g1(ytT2&gfDh&{B3;@n2=#vg;9^yBt!ocmAlE)1;IN+%wS(WD!e<1@B?bL_=IYyr!p@)EedZfB$m>wCT z2b}UgDTfR1rG2^qpd1mLd{NVIb%usO4q!Y;PwuCM?)TIC=@j?-nf)}){r>6!^5oF` z@gZu^`xM0y)Hw5X096i6e>|2NFJ1!!=U-si+dU2S3K1{z59=7FXqt+Wg zqcFWh1GwfKq+dgFUZ!hsGkGoj32!j|Odq1Z&=BqzAECd2%a7C7fuJAYvhYQ;{|48B zzoQfMI&k|IF7p_Xs-KH9q~v)nDe)z|#aL@Oc5%6EJ(avc;TgsW=qQ`j_Smd;g3far z4BDf)?*zy@YD_7?^#*_Lf!>-6ZomyUwh-aH#bM56z`$=@o~$kV-7x)OADOjff5OLJ znvLGSNUx^E@dH#;_+UYXO|_=usbumrIIBLY|u!1Ngy_t?v+V3Gk7Y$F zsmxl4St6Ik_fiSe+b~yUSfdjXM|>Jh;kh)G7vT66bUL4j$C3-Fo~!VjubP^;me%uP zyoRfzEBP$yZey+&KLeeN{cZgJ37iP?*=mckRN zX_)^>U`W)^R5GUe-lRDO7ka!)rywJX+g1mQuVO7P#vH8~VFVB(43G0M?1wK{lM7DW zOS5pi4F|aqhrMWoTfsjM);z_*y>wH}F%AZ*7d^(X0riFDg(KX}R9`$q+slhxHCta) zUZkb>2yfB?T<;m83(7rqKL_iLMI(GUG%{aNUxJ*Kx{o^Oo{f>)cstO&0vFo7909r;aj$t5 z_W`M!>0*vTMvG`SE#`jo8ko0$V;qOY2gbCWx5p1)yb!8~ir)m7C&*8REt70;&;U%1 z@SaX3Iqi;E>)!KDVBP%FN~pk@hE0^@_W-nZC|H~;!eI-r1<}nQ=@sn|0oX`~6|NRm 
z*rmV}?~<=;6;$yJBYY#IgN_D3JTQz9-~Ot8j6Wp9J|e?zt1rY-cgj+CK}++!2dLI= zIL1RG{25`E^1|}sBmAHqTwYjRUOdbXITi6uY^v6Sg^5E{#9Kkx9`bP*RJEvrw?Ut5 zrL(vfRE>FFeLaNBBv>;+JGF9E1AeMZ*2! z-^mm$1<#5!j__Bq)@wTs!rqN%g*VWh{2n@l`jh;DEO|PXrp7Tx zo`54qeNBT`@GbaV1yn~1`P=**?A%Tz{9UNDVi@ zQSk{{K*cumL;V?e$ft#6sR)Z>2%UD=;Ie!}-=Ibr>_`v>V__xlnH&P&20*0kxHt@l z`3Hd<-{}GL5cT7~82_;G)Zlzygi;to%2KB^ol<N9>CkO2k9mL9BwndkY(bIr+MUfnn#ZFKeT7y zp;!2y{4a;ni0a;?4uhUAJ;VD3b@_ZA1Zel!6jIk<5HQc@9^}_HKTCxV;mrSj;)~w( z)d%@MaBz*_5EX8!KB&ao_04i?1*jD}6?$5_Ot*F%wIa9jRq1jEsv@a*-M(j}mR{c@ zwW&^@Dn-rXR^WZy?<;YqJt?(Pw{N=C%G?@UlKXu=r^fKX?k{w^PLi(llI2pH;r5*> zwV7`1bg4~uYxAWx#jTwowP|*3^CSG)L-25Ykj7J0C6Kz1s!`S;Dm@qF3Pe6FDA!Ps zI#G7f<#Z*=5JH3)$~dgwyHH+3@1q-0-bA<29VqXlPtksq2k0Ss1m&ZM0G~qnG<}1f zNBIK%h+ahbQ+UyTK=}&1<=0TY4)?8CRl+Pz=b0#H;my^2lnbCa&O*P}Xoz zWYGn?sAjF9s!vFmhEjeisZD;6;2QI#J2F3+A(^&Ob4PV#sy?q|ifmSdC9yLEv#Lkb zB4}d;>p4T6J*>`wFP(9k^#S0*ECJwlI$)NnSE4ywMMvbc_>?4_oaO7Wyd2nv_*c9t z7}F88l7VD5@nEmKK5J#4Eo1-_i7IEE%?_Al)fv-m6hca}TrGR0aMns?s!^@ZfzfRd z0u+(*tzvA0l$Nv28kC|Ez16NmkjY2b%UfB?d&eyAwiK!stfJH!CCc<{DpCO|P(jtH zy7X7K+N3VlUz^os>T>Qnvj09LD*8dz1X ecBrHpEG~hEXaB8p%67X{J5hSTe~rkUwEr(LO30D` literal 0 HcmV?d00001 diff --git a/libs/dissect/build.gradle b/libs/dissect/build.gradle index 47f7970ea5ac0..dc98d2820ef52 100644 --- a/libs/dissect/build.gradle +++ b/libs/dissect/build.gradle @@ -40,4 +40,3 @@ dependencies { tasks.named('forbiddenApisMain').configure { replaceSignatureFiles 'jdk-signatures' } - diff --git a/libs/dissect/src/test/resources/specification/tests.json b/libs/dissect/src/test/resources/specification/tests.json index 1cb85ce651940..490383ba3fedb 100644 --- a/libs/dissect/src/test/resources/specification/tests.json +++ b/libs/dissect/src/test/resources/specification/tests.json @@ -360,4 +360,4 @@ "append": "" } -] \ No newline at end of file +] diff --git a/libs/geo/build.gradle b/libs/geo/build.gradle index fac5c1b84d2b0..8b81129f43b67 100644 --- a/libs/geo/build.gradle +++ b/libs/geo/build.gradle @@ -42,4 +42,3 @@ tasks.named('forbiddenApisMain').configure { // TODO: Need to decide how we want to handle for forbidden signatures with the changes to core replaceSignatureFiles 'jdk-signatures' } - diff --git a/libs/grok/src/main/resources/patterns/exim b/libs/grok/src/main/resources/patterns/exim index 68c4e5cd7d0d7..e81eace04d32d 100644 --- a/libs/grok/src/main/resources/patterns/exim +++ b/libs/grok/src/main/resources/patterns/exim @@ -10,4 +10,3 @@ EXIM_PROTOCOL (P=%{NOTSPACE:protocol}) EXIM_MSG_SIZE (S=%{NUMBER:exim_msg_size}) EXIM_HEADER_ID (id=%{NOTSPACE:exim_header_id}) EXIM_SUBJECT (T=%{QS:exim_subject}) - diff --git a/libs/grok/src/main/resources/patterns/junos b/libs/grok/src/main/resources/patterns/junos index 4eea59d08ccf9..2da91cc6ce3df 100644 --- a/libs/grok/src/main/resources/patterns/junos +++ b/libs/grok/src/main/resources/patterns/junos @@ -6,4 +6,3 @@ RT_FLOW1 %{RT_FLOW_EVENT:event}: %{GREEDYDATA:close-reason}: %{IP:src-ip}/%{INT: RT_FLOW2 %{RT_FLOW_EVENT:event}: session created %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{IP:nat-src-ip}/%{INT:nat-src-port}->%{IP:nat-dst-ip}/%{INT:nat-dst-port} %{DATA:src-nat-rule-name} %{DATA:dst-nat-rule-name} %{INT:protocol-id} %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} %{INT:session-id} .* RT_FLOW3 %{RT_FLOW_EVENT:event}: session denied %{IP:src-ip}/%{INT:src-port}->%{IP:dst-ip}/%{INT:dst-port} %{DATA:service} %{INT:protocol-id}\(\d\) %{DATA:policy-name} %{DATA:from-zone} %{DATA:to-zone} .* - diff --git 
a/libs/grok/src/main/resources/patterns/postgresql b/libs/grok/src/main/resources/patterns/postgresql index c5b3e90b7250f..6901c6253e926 100644 --- a/libs/grok/src/main/resources/patterns/postgresql +++ b/libs/grok/src/main/resources/patterns/postgresql @@ -1,3 +1,2 @@ # Default postgresql pg_log format pattern POSTGRESQL %{DATESTAMP:timestamp} %{TZ} %{DATA:user_id} %{GREEDYDATA:connection_id} %{POSINT:pid} - diff --git a/libs/ssl-config/build.gradle b/libs/ssl-config/build.gradle index 740d5e309350c..456641f0d7645 100644 --- a/libs/ssl-config/build.gradle +++ b/libs/ssl-config/build.gradle @@ -61,4 +61,3 @@ tasks.test { jvmArgs += ["--add-opens", "java.base/java.security.cert=ALL-UNNAMED"] } } - diff --git a/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/cjk_analysis.json b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/cjk_analysis.json index 89a1281473cd7..c69b889c914a4 100644 --- a/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/cjk_analysis.json +++ b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/cjk_analysis.json @@ -34,4 +34,4 @@ } } } -} \ No newline at end of file +} diff --git a/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/pattern_capture.json b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/pattern_capture.json index d82fb987e6ed2..5057a1e6d7f9e 100644 --- a/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/pattern_capture.json +++ b/modules/analysis-common/src/test/resources/org/opensearch/analysis/common/pattern_capture.json @@ -43,4 +43,4 @@ } } } -} \ No newline at end of file +} diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml index 2015fe31fccb5..1637c8736134f 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml @@ -68,4 +68,3 @@ - match: { tokens.1.token: "f" } - match: { tokens.2.token: "g" } - match: { tokens.3.token: "h" } - diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_analysis_stats.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_analysis_stats.yml index a19a1f2721910..5468da5216bb4 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_analysis_stats.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/cluster.stats/10_analysis_stats.yml @@ -119,4 +119,3 @@ - match: { indices.analysis.built_in_analyzers.2.name: spanish } - match: { indices.analysis.built_in_analyzers.2.count: 2 } - match: { indices.analysis.built_in_analyzers.2.index_count: 2 } - diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml index f0f8765ab5130..42d1c23001300 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/indices.analyze/10_synonyms.yml @@ -76,4 +76,3 @@ - match: 
{ tokens.5.token: dude } - match: { tokens.5.position: 4 } - match: { tokens.5.positionLength: null } - diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml index ae039e453be6c..4388de3eef30a 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml @@ -229,4 +229,3 @@ setup: query: bar baz analyzer: lower_graph_syns - match: { hits.total: 1 } - diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals.yml index 35a611d13f359..9ad68e960421c 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/70_intervals.yml @@ -56,4 +56,3 @@ setup: use_field: text_en max_gaps: 1 - match: { hits.total.value: 1 } - diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml index 77a1df81a296a..ef8332c2670d0 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml @@ -91,4 +91,3 @@ teardown: get: index: test id: 3 - diff --git a/modules/ingest-user-agent/build.gradle b/modules/ingest-user-agent/build.gradle index cd04925287b8f..a3752ad1c7f7e 100644 --- a/modules/ingest-user-agent/build.gradle +++ b/modules/ingest-user-agent/build.gradle @@ -43,4 +43,3 @@ restResources { testClusters.all { extraConfigFile 'ingest-user-agent/test-regexes.yml', file('src/test/test-regexes.yml') } - diff --git a/modules/ingest-user-agent/src/test/test-regexes.yml b/modules/ingest-user-agent/src/test/test-regexes.yml index e41dec700c047..8815c85c7c6e9 100644 --- a/modules/ingest-user-agent/src/test/test-regexes.yml +++ b/modules/ingest-user-agent/src/test/test-regexes.yml @@ -1,3 +1,3 @@ user_agent_parsers: - regex: '.*' - family_replacement: 'Test' \ No newline at end of file + family_replacement: 'Test' diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index e3feacd71f060..9d7b0e2f0979c 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -52,4 +52,3 @@ tasks.named("dependencyLicenses").configure { mapping from: /lucene-.*/, to: 'lucene' mapping from: /asm-.*/, to: 'asm' } - diff --git a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 index 7dd168833e347..21b03b85d8edd 100644 --- a/modules/lang-painless/src/main/antlr/PainlessLexer.g4 +++ b/modules/lang-painless/src/main/antlr/PainlessLexer.g4 @@ -124,4 +124,4 @@ ID: [_a-zA-Z] [_a-zA-Z0-9]*; mode AFTER_DOT; DOTINTEGER: ( '0' | [1-9] [0-9]* ) -> mode(DEFAULT_MODE); -DOTID: [_a-zA-Z] [_a-zA-Z0-9]* -> mode(DEFAULT_MODE); \ No newline at end of file +DOTID: [_a-zA-Z] [_a-zA-Z0-9]* -> mode(DEFAULT_MODE); diff --git 
a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml index 000e1af694d7d..aa01647811c83 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml @@ -139,4 +139,3 @@ setup: - is_false: aggregations.placeholder.buckets.0.str_terms.buckets.1.key_as_string - match: { aggregations.placeholder.buckets.0.str_terms.buckets.1.doc_count: 1 } - match: { aggregations.placeholder.buckets.0.the_bucket_script.value: 2.0 } - diff --git a/modules/opensearch-dashboards/build.gradle b/modules/opensearch-dashboards/build.gradle index 9bda17243bdb4..f76ca739faf81 100644 --- a/modules/opensearch-dashboards/build.gradle +++ b/modules/opensearch-dashboards/build.gradle @@ -41,4 +41,3 @@ dependencies { testClusters.all { module ':modules:reindex' } - diff --git a/modules/repository-url/build.gradle b/modules/repository-url/build.gradle index 24742416de6f2..702f0e9bb0f8b 100644 --- a/modules/repository-url/build.gradle +++ b/modules/repository-url/build.gradle @@ -75,4 +75,3 @@ testClusters.all { "http://snapshot.test*,http://${urlFixture.addressAndPort}" }, PropertyNormalization.IGNORE_VALUE } - diff --git a/modules/systemd/build.gradle b/modules/systemd/build.gradle index b157fd9321fc9..26e094a9eeae1 100644 --- a/modules/systemd/build.gradle +++ b/modules/systemd/build.gradle @@ -32,4 +32,3 @@ opensearchplugin { description 'Integrates OpenSearch with systemd' classname 'org.opensearch.systemd.SystemdPlugin' } - diff --git a/plugins/analysis-icu/src/test/resources/org/opensearch/index/analysis/KeywordTokenizer.rbbi b/plugins/analysis-icu/src/test/resources/org/opensearch/index/analysis/KeywordTokenizer.rbbi index 8e6de8aa94abb..86eb398ee9157 100644 --- a/plugins/analysis-icu/src/test/resources/org/opensearch/index/analysis/KeywordTokenizer.rbbi +++ b/plugins/analysis-icu/src/test/resources/org/opensearch/index/analysis/KeywordTokenizer.rbbi @@ -18,4 +18,4 @@ # Apply rule status {200}=RBBI.WORD_LETTER, which is mapped # to token type by DefaultICUTokenizerConfig. 
-.+ {200}; \ No newline at end of file +.+ {200}; diff --git a/plugins/analysis-kuromoji/build.gradle b/plugins/analysis-kuromoji/build.gradle index 60738fb28b6d5..426b85f44bf55 100644 --- a/plugins/analysis-kuromoji/build.gradle +++ b/plugins/analysis-kuromoji/build.gradle @@ -46,4 +46,3 @@ restResources { tasks.named("dependencyLicenses").configure { mapping from: /lucene-.*/, to: 'lucene' } - diff --git a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml index 1be0d8525a1c6..2268d30c986df 100644 --- a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml +++ b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/10_metaphone.yml @@ -31,4 +31,3 @@ - match: { tokens.1.token: joe } - match: { tokens.2.token: BLKS } - match: { tokens.3.token: bloggs } - diff --git a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yml b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yml index 84b0129414c8e..40215cc469fc9 100644 --- a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yml +++ b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/20_double_metaphone.yml @@ -28,4 +28,3 @@ - length: { tokens: 1 } - match: { tokens.0.token: SPRKLF } - diff --git a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml index bdd1ddef388df..dcc46484780dc 100644 --- a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml +++ b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/30_beider_morse.yml @@ -30,4 +30,3 @@ - length: { tokens: 1 } - match: { tokens.0.token: Svarts } - diff --git a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml index bee4c8bf5f432..9b173a710ea43 100644 --- a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml +++ b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/50_daitch_mokotoff.yml @@ -27,4 +27,3 @@ - length: { tokens: 1 } - match: { tokens.0.token: "645740" } - diff --git a/plugins/analysis-smartcn/build.gradle b/plugins/analysis-smartcn/build.gradle index 92f2774854715..d74d314ab0673 100644 --- a/plugins/analysis-smartcn/build.gradle +++ b/plugins/analysis-smartcn/build.gradle @@ -47,4 +47,3 @@ restResources { tasks.named("dependencyLicenses").configure { mapping from: /lucene-.*/, to: 'lucene' } - diff --git a/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/attributes/google-compute-default-zone b/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/attributes/google-compute-default-zone index 6cf886270bef1..218127ccfb695 100644 --- 
a/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/attributes/google-compute-default-zone +++ b/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/attributes/google-compute-default-zone @@ -1 +1 @@ -europe-west1-b \ No newline at end of file +europe-west1-b diff --git a/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/project-id b/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/project-id index 25b8069381897..44be476f3ae83 100644 --- a/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/project-id +++ b/plugins/discovery-gce/src/test/resources/org/opensearch/discovery/gce/computeMetadata/v1/project/project-id @@ -1 +1 @@ -metadataserver \ No newline at end of file +metadataserver diff --git a/plugins/examples/build.gradle b/plugins/examples/build.gradle index e4e0ca6f7be99..460c6e81eac5c 100644 --- a/plugins/examples/build.gradle +++ b/plugins/examples/build.gradle @@ -36,4 +36,3 @@ configure(project('painless-whitelist')) { } } } - diff --git a/plugins/examples/custom-settings/build.gradle b/plugins/examples/custom-settings/build.gradle index 104660c458991..5b35d887b3db1 100644 --- a/plugins/examples/custom-settings/build.gradle +++ b/plugins/examples/custom-settings/build.gradle @@ -42,4 +42,3 @@ testClusters.all { // Adds a setting in the OpenSearch keystore before running the integration tests keystore 'custom.secured', 'password' } - diff --git a/plugins/examples/custom-settings/src/main/config/custom.yml b/plugins/examples/custom-settings/src/main/config/custom.yml index 1759e0ff96d40..258e050a0664b 100644 --- a/plugins/examples/custom-settings/src/main/config/custom.yml +++ b/plugins/examples/custom-settings/src/main/config/custom.yml @@ -2,4 +2,4 @@ custom: simple: foo list: [0, 1, 1, 2, 3, 5, 8, 13, 21] - filtered: secret \ No newline at end of file + filtered: secret diff --git a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index cc939b15854d5..b97d091af9d08 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -56,4 +56,3 @@ javaRestTest { dependsOn exampleFixture nonInputProperties.systemProperty 'external.address', "${-> exampleFixture.addressAndPort}" } - diff --git a/plugins/examples/script-expert-scoring/build.gradle b/plugins/examples/script-expert-scoring/build.gradle index 9f2bab20a7db0..e4ddd97abbe4c 100644 --- a/plugins/examples/script-expert-scoring/build.gradle +++ b/plugins/examples/script-expert-scoring/build.gradle @@ -39,4 +39,3 @@ opensearchplugin { } test.enabled = false - diff --git a/plugins/ingest-attachment/src/test/resources/org/opensearch/ingest/attachment/test/sample-files/asciidoc.asciidoc b/plugins/ingest-attachment/src/test/resources/org/opensearch/ingest/attachment/test/sample-files/asciidoc.asciidoc index dc06d4e83dd30..4a2b2c388cfc1 100644 --- a/plugins/ingest-attachment/src/test/resources/org/opensearch/ingest/attachment/test/sample-files/asciidoc.asciidoc +++ b/plugins/ingest-attachment/src/test/resources/org/opensearch/ingest/attachment/test/sample-files/asciidoc.asciidoc @@ -2,4 +2,3 @@ = AsciiDoc test Here is a test of the asciidoc format. 
- diff --git a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/10_basic.yml b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/10_basic.yml index 607fa5bf8b781..88f6f33ad0a66 100644 --- a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/10_basic.yml +++ b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/10_basic.yml @@ -12,4 +12,3 @@ - contains: { 'nodes.$master.plugins': { name: ingest-attachment } } - contains: { 'nodes.$master.ingest.processors': { type: attachment } } - diff --git a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml index b4acccf36879d..0e43e9e40b619 100644 --- a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml +++ b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml @@ -142,4 +142,3 @@ request_cache: false body: { "query" : {"match_phrase" : { "my_field" : {"query": "~MARK0", "analyzer": "whitespace"} } }, "highlight" : { "type" : "annotated", "fields" : { "my_field" : {} } } } - match: {_shards.failed: 0} - diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml index 20019686d3db1..fbe0e0a8b0066 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/30_snapshot.yml @@ -45,4 +45,3 @@ - do: snapshot.delete_repository: repository: test_snapshot_repository - diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml index 44f29fe0341a6..821110dc52bed 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/30_snapshot.yml @@ -47,4 +47,3 @@ - do: snapshot.delete_repository: repository: test_snapshot_repository - diff --git a/plugins/transport-nio/build.gradle b/plugins/transport-nio/build.gradle index 88355cdf22728..2b9e11cb2521f 100644 --- a/plugins/transport-nio/build.gradle +++ b/plugins/transport-nio/build.gradle @@ -183,4 +183,3 @@ thirdPartyAudit { 'io.netty.handler.ssl.util.OpenJdkSelfSignedCertGenerator' ) } - diff --git a/qa/die-with-dignity/build.gradle b/qa/die-with-dignity/build.gradle index 008e2e19bf72f..db8762fe921bf 100644 --- a/qa/die-with-dignity/build.gradle +++ b/qa/die-with-dignity/build.gradle @@ -35,4 +35,3 @@ testClusters.javaRestTest { } test.enabled = false - diff --git a/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool-cmd1.help b/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool-cmd1.help index d083e3a65348f..60acabffb6544 100644 --- a/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool-cmd1.help +++ b/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool-cmd1.help @@ -1 +1 @@ -cmd1 help \ No newline at end of file +cmd1 help diff --git 
a/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool.help b/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool.help index 023b1accdffb1..2a5850ba79db2 100644 --- a/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool.help +++ b/qa/evil-tests/src/test/resources/org/opensearch/common/cli/tool.help @@ -1 +1 @@ -tool help \ No newline at end of file +tool help diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml index 92ae11c712b25..23cd1567b49ba 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/70_skip_shards.yml @@ -220,4 +220,3 @@ # When all shards are skipped current logic returns 1 to produce a valid search result - match: { _shards.skipped : 1} - match: { _shards.failed: 0 } - diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml index e1ffcea930a42..840c7f5f6297e 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml @@ -204,4 +204,3 @@ tasks.get: wait_for_completion: true task_id: $task - diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml index 89992eeba616f..6427a45e19f58 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/20_date_range.yml @@ -111,5 +111,3 @@ gte: "2019-02-01T00:00+01:00" lte: "2019-02-01T00:00+01:00" - match: { hits.total: 1 } - - diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml index cb74c33cbd31a..f1c00ee896f92 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml @@ -133,5 +133,3 @@ wait_for_completion: true task_id: $task_id - match: { task.headers.X-Opaque-Id: "Reindexing Again" } - - diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/20_date_range.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/20_date_range.yml index 7dae2a4d6241a..026dcff32e175 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/20_date_range.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/20_date_range.yml @@ -38,4 +38,3 @@ time_frame: gte: "2019-02-01T00:00+01:00" lte: "2019-02-01T00:00+01:00" - diff --git a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml index 7a0cdcbef0786..c8fcebfba67ab 100644 --- a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml +++ 
b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml @@ -112,4 +112,3 @@ _id: test_id2 pipeline: my_pipeline_1 - f1: v2 - diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml index eaf6b24030a06..c58735f7862e6 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml @@ -102,4 +102,3 @@ - match: { error.processor_type: "script" } - match: { error.type: "script_exception" } - match: { error.reason: "compile error" } - diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml index 0f514f2213492..d7f565f30c93d 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml @@ -34,4 +34,3 @@ id: 1 pipeline: "my_timely_pipeline" body: {} - diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/scripts/master.painless b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/scripts/master.painless index 29880e8fd5f57..82f007e8e4dac 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/scripts/master.painless +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/scripts/master.painless @@ -1 +1 @@ -ctx.bytes_total = ctx.bytes_in + ctx.bytes_out \ No newline at end of file +ctx.bytes_total = ctx.bytes_in + ctx.bytes_out diff --git a/qa/smoke-test-plugins/src/test/java/org/opensearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java b/qa/smoke-test-plugins/src/test/java/org/opensearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java index a7e50601ad9df..a1c61ca97f877 100644 --- a/qa/smoke-test-plugins/src/test/java/org/opensearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java +++ b/qa/smoke-test-plugins/src/test/java/org/opensearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java @@ -49,4 +49,3 @@ public static Iterable parameters() throws Exception { return OpenSearchClientYamlSuiteTestCase.createParameters(); } } - diff --git a/release-notes/opensearch.release-notes-1.0.0-rc1.md b/release-notes/opensearch.release-notes-1.0.0-rc1.md index 2223d732abb98..205873fd282b2 100644 --- a/release-notes/opensearch.release-notes-1.0.0-rc1.md +++ b/release-notes/opensearch.release-notes-1.0.0-rc1.md @@ -411,5 +411,3 @@ Signed-off-by: Abbas Hussain <abbas_10690@yahoo.com> - - diff --git a/release-notes/opensearch.release-notes-1.1.0.md b/release-notes/opensearch.release-notes-1.1.0.md index 0545e106a15a5..ba5a5d4c95c60 100644 --- a/release-notes/opensearch.release-notes-1.1.0.md +++ b/release-notes/opensearch.release-notes-1.1.0.md @@ -386,5 +386,3 @@ Signed-off-by: Sooraj Sinha <soosinha@amazon.com> - - diff --git a/release-notes/opensearch.release-notes-1.2.0.md b/release-notes/opensearch.release-notes-1.2.0.md index 
86860e5f872da..d7c75f3c0eaf2 100644 --- a/release-notes/opensearch.release-notes-1.2.0.md +++ b/release-notes/opensearch.release-notes-1.2.0.md @@ -458,4 +458,3 @@ Signed-off-by: Nicholas Walter Knize <nknize@apache.org> - diff --git a/release-notes/opensearch.release-notes-1.2.4.md b/release-notes/opensearch.release-notes-1.2.4.md index dc2852a102c44..dc0bce20a2a00 100644 --- a/release-notes/opensearch.release-notes-1.2.4.md +++ b/release-notes/opensearch.release-notes-1.2.4.md @@ -72,5 +72,3 @@ Signed-off-by: dblock <dblock@amazon.com> - - diff --git a/release-notes/opensearch.release-notes-1.3.0.md b/release-notes/opensearch.release-notes-1.3.0.md index 62c5be8413943..7dd71fa47b72f 100644 --- a/release-notes/opensearch.release-notes-1.3.0.md +++ b/release-notes/opensearch.release-notes-1.3.0.md @@ -1295,5 +1295,3 @@ [Nick Knize](mailto:nknize@apache.org) - Thu, 4 Nov 2021 14:46:57 -0500 Signed-off-by: Nicholas Walter Knize <nknize@apache.org> - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml index 8c8c6d50abf41..c91ec511a0fdb 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/10_basic.yml @@ -223,4 +223,3 @@ - match: { items.0.index.status: 400 } - match: { items.0.index.error.type: illegal_argument_exception } - match: { items.0.index.error.reason: "no write index is defined for alias [test_index]. The write index may be explicitly disabled using is_write_index=false or the alias points to multiple indices without one being designated as a write index" } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml index 3d956dce54289..cb3553abbd435 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml @@ -14,4 +14,3 @@ index: test_index - match: {count: 2} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml index 8b6467eeed975..fabe674697cde 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/30_big_string.yml @@ -14,4 +14,3 @@ index: test_index - match: {count: 2} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml index e29e84740ee5c..fb9554619a818 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/40_source.yml @@ -68,4 +68,3 @@ - match: { items.0.update.get._source.foo: garply } - is_false: items.0.update.get._source.bar - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yml index 789ea5fc19c3f..f04c674d420ee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.nodes/10_basic.yml @@ -131,4 +131,3 @@ - match: $body: | /^(\S{5,}\n)+$/ - diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.pending_tasks/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.pending_tasks/10_basic.yml index f8fd8ebef170d..885f6c4a97912 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.pending_tasks/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.pending_tasks/10_basic.yml @@ -11,4 +11,3 @@ local: true - is_true: tasks - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.remote_info/10_info.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.remote_info/10_info.yml index e11eff2b78a3c..75058cefd5c53 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.remote_info/10_info.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.remote_info/10_info.yml @@ -3,4 +3,3 @@ - do: cluster.remote_info: {} - is_true: '' - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic.yml index 5474c9bdf4da0..23eebacabf3f3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cluster.voting_config_exclusions/10_basic.yml @@ -104,4 +104,3 @@ teardown: cluster.post_voting_config_exclusions: node_ids: nodeId node_names: nodeName - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml index 9c048c361bd5c..e1341ac2b5380 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/40_routing.yml @@ -39,4 +39,3 @@ get: index: test_1 id: 1 - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml index 27e9350caed70..c3c407cd9173a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/30_routing.yml @@ -30,4 +30,3 @@ index: test_1 id: 1 routing: 5 - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml index 921397b238f51..fabf8fb87a7b6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/15_default_values.yml @@ -14,4 +14,3 @@ - match: { _index: test_1 } - match: { _id: '1' } - match: { _source: { foo: "bar" } } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml index 23c7e5cbc90a6..1bafdc3dab21f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/20_stored_fields.yml @@ -48,5 +48,3 @@ - match: { fields.foo: [bar] } - match: { fields.count: [1] } - match: { _source.foo: bar } - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml index 9ba546d6ef942..7f45b39add8a7 100644 --- 
a/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/40_routing.yml @@ -41,4 +41,3 @@ get: index: test_1 id: 1 - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml index 9037a9113e937..3f45a1da09dce 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/get/90_versions.yml @@ -80,4 +80,3 @@ id: 1 version: 1 version_type: external_gte - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml index 630cf39dbe65c..f6f497269b043 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/40_routing.yml @@ -40,4 +40,3 @@ get: index: test_1 id: 1 - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.delete_alias/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.delete_alias/all_path_options.yml index d1d01cbaaa7e6..67369b67b3249 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.delete_alias/all_path_options.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.delete_alias/all_path_options.yml @@ -221,4 +221,3 @@ setup: catch: param indices.delete_alias: index: "test_index1" - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml index c7892f58a6f59..32b692b16c5d6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.exists_template/10_basic.yml @@ -41,4 +41,3 @@ setup: local: true - is_false: '' - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml index 137736e6823a9..d62c4c8882b13 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml @@ -27,5 +27,3 @@ index: test max_num_segments: 10 only_expunge_deletes: true - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml index 7c7b07b587849..690f83d5f3c2b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/40_missing_index.yml @@ -6,5 +6,3 @@ indices.get_field_mapping: index: test_index fields: field - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml index 2c9ff58b445df..814bd1e3a4063 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_field_mapping/50_field_wildcards.yml @@ -127,4 +127,3 @@ 
setup: - match: {test_index_2.mappings.t1.full_name: t1 } - match: {test_index_2.mappings.t2.full_name: t2 } - length: {test_index_2.mappings: 2} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_index_template/20_get_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_index_template/20_get_missing.yml index 2dfa466ba0eca..4c855d928d1c0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_index_template/20_get_missing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_index_template/20_get_missing.yml @@ -17,4 +17,3 @@ setup: catch: missing indices.get_index_template: name: test - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml index 1bbfbc4f4c967..e9502cdc08436 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml @@ -29,4 +29,3 @@ index: test_index ignore_unavailable: true allow_no_indices: false - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/20_aliases.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/20_aliases.yml index da7678202ed34..4f8d1371d90b1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/20_aliases.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_settings/20_aliases.yml @@ -23,4 +23,3 @@ - match: { test-index.settings.index.number_of_replicas: "3" } - match: { test-index.settings.index.number_of_shards: "2" } - match: { test-index.settings.index.refresh_interval: "-1" } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/20_get_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/20_get_missing.yml index 2751f57dacb6c..ee7ba62c9beb4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/20_get_missing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_template/20_get_missing.yml @@ -10,4 +10,3 @@ setup: catch: missing indices.get_template: name: test - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/20_multiple_indices.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/20_multiple_indices.yml index bef5ea8a54651..2720b08514ba3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/20_multiple_indices.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.open/20_multiple_indices.yml @@ -101,4 +101,3 @@ setup: search: rest_total_hits_as_int: true index: test_index3 - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/all_path_options.yml index bef57bbddf165..47828c43350b7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/all_path_options.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/all_path_options.yml @@ -113,4 +113,3 @@ setup: - do: catch: param indices.put_alias: {} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml 
index 36317c7ae173c..23f87ea1ec2b3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -143,4 +143,3 @@ - is_false: test_index.mappings.properties.foo.meta.bar - match: { test_index.mappings.properties.foo.meta.baz: "quux" } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml index c1daa76fe3d6e..ca7a21df20ea4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_mapping/all_path_options.yml @@ -159,4 +159,3 @@ setup: indices.get_mapping: {} - match: {test_index1.mappings.properties.text.type: text} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml index 07f1956f0fcca..ac45c4e098e6e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_settings/all_path_options.yml @@ -110,4 +110,3 @@ setup: - match: {test_index1.settings.index.refresh_interval: 1s} - match: {test_index2.settings.index.refresh_interval: 1s} - match: {foo.settings.index.refresh_interval: 1s} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.refresh/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.refresh/10_basic.yml index 6e493a0cce936..bf20d51bc97cd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.refresh/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.refresh/10_basic.yml @@ -55,4 +55,3 @@ setup: - match: { _shards.total: 0 } - match: { _shards.successful: 0 } - match: { _shards.failed: 0 } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml index dc68ffc9a3b86..a36db0cda8526 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/10_basic.yml @@ -148,4 +148,3 @@ body: conditions: max_docs: 1 - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml index f5d223259dc06..7d1b447b4e293 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.rollover/20_max_doc_condition.yml @@ -52,4 +52,3 @@ - match: { conditions: { "[max_docs: 2]": true } } - match: { rolled_over: true } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/20_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/20_routing.yml index ecedcef0c1a48..c812d84dfe7e3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/20_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.update_aliases/20_routing.yml @@ -132,4 +132,3 @@ setup: index: test_index - 
match: {test_index.aliases.test_alias: {'index_routing': '5', 'search_routing': '5'}} - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yml index 55070cb8c1f97..62c1a51dace52 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.upgrade/10_basic.yml @@ -69,4 +69,3 @@ indices.upgrade: index: ["test_index", "does_not_exist"] ignore_unavailable: false - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/info/20_lucene_version.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/info/20_lucene_version.yml index 83414fbabc565..427a585815db0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/info/20_lucene_version.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/info/20_lucene_version.yml @@ -2,6 +2,3 @@ "Lucene Version": - do: {info: {}} - is_true: version.lucene_version - - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml index a09619b7255c3..1f1f42890355e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/nodes.stats/11_indices_metrics.yml @@ -224,4 +224,3 @@ - is_false: nodes.$node_id.indices.translog - is_false: nodes.$node_id.indices.recovery - is_true: nodes.$node_id.indices.segments.file_sizes - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/ping/10_ping.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/ping/10_ping.yml index ec07c218dabd9..da160503caab4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/ping/10_ping.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/ping/10_ping.yml @@ -2,4 +2,3 @@ "Ping": - do: { ping: {}} - is_true: '' - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml index 82965bda51576..c540814a1690d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml @@ -118,4 +118,3 @@ setup: 2: terms: field: date - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml index 339fe72b77730..c0a8d2fb4500c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/250_moving_fn.yml @@ -74,4 +74,3 @@ buckets_path: "the_avg" window: -1 script: "MovingFunctions.windowMax(values)" - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/350_variable_width_histogram.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/350_variable_width_histogram.yml index 071e543e8a25e..cc41ef1fa6cd3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/350_variable_width_histogram.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/350_variable_width_histogram.yml @@ -47,4 +47,3 @@ 
setup: - match: { aggregations.histo.buckets.1.doc_count: 1 } - match: { aggregations.histo.buckets.2.key: 4.5 } - match: { aggregations.histo.buckets.2.doc_count: 2 } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml index 091638d6a07fb..4de2e8142f6ec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -196,4 +196,3 @@ setup: # When this test is run during runtime-field's tests we *don't* get floating point errors. Thus the funny assertion here that matches both. - lt: { hits.hits.0.fields.d.0: 3.141 } - gte: { hits.hits.0.fields.d.0: 3.14 } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml index 31f6f35003e2d..40e1fbcf7a2ab 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml @@ -154,4 +154,3 @@ setup: - match: { _shards.failed: 0 } - match: { hits.total: 2 } - length: { aggregations.idx_terms.buckets: 2 } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml index 0286d3caf66b8..0b9172e0740ea 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/230_interval_query.yml @@ -750,5 +750,3 @@ setup: - prefix: prefix: out - match: { hits.total.value: 3 } - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml index feb875e81a785..1ddba45c97c72 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/240_date_nanos.yml @@ -164,4 +164,3 @@ setup: - match: { aggregations.date.buckets.1.key: 1540857600000 } - match: { aggregations.date.buckets.1.key_as_string: "2018-10-30T00:00:00.000Z" } - match: { aggregations.date.buckets.1.doc_count: 2 } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml index 5f5d88dba7687..d7690ac6097ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/90_search_after.yml @@ -224,4 +224,3 @@ - match: {hits.hits.0._index: test } - match: {hits.hits.0._source.timestamp: "2019-10-21 00:30:04.828740" } - match: {hits.hits.0.sort: [1571617804828740000] } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml index 4d03971aba252..cfdd38b9ffd1c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/20_doc_upsert.yml @@ -32,5 +32,3 @@ - match: { _source.foo: bar } - match: { _source.count: 1 } - - diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml index c65fc5af27fcc..7ee5c01089ff1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/22_doc_as_upsert.yml @@ -32,5 +32,3 @@ - match: { _source.foo: bar } - match: { _source.count: 2 } - - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_if_seq_no.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_if_seq_no.yml index f982adf693ad0..c93be37be49f5 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_if_seq_no.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/35_if_seq_no.yml @@ -61,4 +61,3 @@ - match: { errors: true } - match: { items.0.update.status: 409 } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml index 6f43d381e0537..28e42f9dafea9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/40_routing.yml @@ -55,4 +55,3 @@ doc: { foo: baz } - match: { get._source.foo: baz } - diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml index fe76ab5299cda..2d4fda22f4442 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/85_fields_meta.yml @@ -27,5 +27,3 @@ id: 1 parent: 5 stored_fields: [ _routing ] - - diff --git a/sandbox/plugins/concurrent-search/build.gradle b/sandbox/plugins/concurrent-search/build.gradle index acc3cb5092cd8..0e766dc4fc1ba 100644 --- a/sandbox/plugins/concurrent-search/build.gradle +++ b/sandbox/plugins/concurrent-search/build.gradle @@ -39,4 +39,4 @@ opensearchplugin { } yamlRestTest.enabled = false; -testingConventions.enabled = false; \ No newline at end of file +testingConventions.enabled = false; diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/package-info b/server/src/main/java/org/opensearch/search/aggregations/bucket/package-info index a2cb4a9493c56..52b88548dacdd 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/package-info +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/package-info @@ -21,4 +21,3 @@ * Aggregations module */ package org.opensearch.search.aggregations.bucket; - diff --git a/server/src/test/resources/config/opensearch.yml b/server/src/test/resources/config/opensearch.yml index b6ebc6bd10576..21f4f7b1b933a 100644 --- a/server/src/test/resources/config/opensearch.yml +++ b/server/src/test/resources/config/opensearch.yml @@ -1,3 +1,2 @@ yaml.config.exists: true - diff --git a/server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US_custom/settings.yml b/server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US_custom/settings.yml index 1a91653e56a1d..7e9eed5920f2e 100644 --- a/server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US_custom/settings.yml +++ b/server/src/test/resources/indices/analyze/conf_dir/hunspell/en_US_custom/settings.yml @@ -1,2 +1,2 @@ ignore_case: true -strict_affix_parsing: true \ No newline at end of file +strict_affix_parsing: true diff --git 
a/server/src/test/resources/org/opensearch/action/admin/invalid.txt.keystore b/server/src/test/resources/org/opensearch/action/admin/invalid.txt.keystore index 04613ffab7f36..a18f3707ac0d9 100644 --- a/server/src/test/resources/org/opensearch/action/admin/invalid.txt.keystore +++ b/server/src/test/resources/org/opensearch/action/admin/invalid.txt.keystore @@ -1,3 +1,2 @@ admin admin dragon 12345 - diff --git a/server/src/test/resources/org/opensearch/action/fieldstats/fieldstats-index-constraints-request.json b/server/src/test/resources/org/opensearch/action/fieldstats/fieldstats-index-constraints-request.json index eb8ca972dcd52..8fbf7684b6819 100644 --- a/server/src/test/resources/org/opensearch/action/fieldstats/fieldstats-index-constraints-request.json +++ b/server/src/test/resources/org/opensearch/action/fieldstats/fieldstats-index-constraints-request.json @@ -40,4 +40,4 @@ } } } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json index 33c09fe8dbd9f..53e5bbd9fa946 100644 --- a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json +++ b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis.json @@ -20,4 +20,4 @@ } } } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis2.json b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis2.json index a81ea538f19fe..19b4d24063b8e 100644 --- a/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis2.json +++ b/server/src/test/resources/org/opensearch/index/analysis/shingle_analysis2.json @@ -12,4 +12,4 @@ } } } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/analysis/stop.json b/server/src/test/resources/org/opensearch/index/analysis/stop.json index 717c9fdee5b08..c59b0fb5056d0 100644 --- a/server/src/test/resources/org/opensearch/index/analysis/stop.json +++ b/server/src/test/resources/org/opensearch/index/analysis/stop.json @@ -15,4 +15,4 @@ } } } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json index b7439dcb9fe6f..14ebf16d92f36 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/genericstore/test-data.json @@ -1,4 +1,4 @@ { "name":"some name", "age":1 -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json index 2e6ec997c4612..765a850b0b663 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/pathmatch/test-data.json @@ -11,4 +11,4 @@ "prop1":"prop1_value" } } -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-data.json b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-data.json index 1ed3c50b98d59..8509d846c13bb 100644 --- 
a/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-data.json +++ b/server/src/test/resources/org/opensearch/index/mapper/dynamictemplate/simple/test-data.json @@ -3,4 +3,4 @@ "age":1, "multi1":"multi 1", "multi2":"multi 2" -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-data.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-data.json index c539fcc885d3c..ec6b1b95ed888 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-data.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/merge/test-data.json @@ -1,4 +1,4 @@ { _id:1, name:"some name" -} \ No newline at end of file +} diff --git a/server/src/test/resources/org/opensearch/index/mapper/multifield/test-data.json b/server/src/test/resources/org/opensearch/index/mapper/multifield/test-data.json index 2e8ab256df92a..fd7ecad71b894 100644 --- a/server/src/test/resources/org/opensearch/index/mapper/multifield/test-data.json +++ b/server/src/test/resources/org/opensearch/index/mapper/multifield/test-data.json @@ -4,4 +4,4 @@ "object1":{ "multi1":"2010-01-01" } -} \ No newline at end of file +} diff --git a/settings.gradle b/settings.gradle index cb8167ee02efe..183a5ec8d1ae1 100644 --- a/settings.gradle +++ b/settings.gradle @@ -136,4 +136,3 @@ if (extraProjects.exists()) { addSubProjects('', extraProjectDir) } } - diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh index a58df6d47f36f..201c437f00b73 100755 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/addprinc.sh @@ -72,4 +72,4 @@ echo "Copying conf to local" # make the configuration available externally cp -v $LOCALSTATEDIR/krb5.conf $BUILD_DIR/krb5.conf.template # We are running as root in the container, allow non root users running the container to be able to clean these up -chmod -R 777 $BUILD_DIR \ No newline at end of file +chmod -R 777 $BUILD_DIR diff --git a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template index ba0832b2b7d99..207fe939fb7a5 100644 --- a/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template +++ b/test/fixtures/krb5kdc-fixture/src/main/resources/provision/krb5.conf.template @@ -59,4 +59,3 @@ .${BUILD_ZONE} = ${REALM_NAME} ${OPENSEARCH_ZONE} = ${REALM_NAME} .${OPENSEARCH_ZONE} = ${REALM_NAME} - diff --git a/test/fixtures/minio-fixture/build.gradle b/test/fixtures/minio-fixture/build.gradle index ba5e0a7d2d814..61f417690b210 100644 --- a/test/fixtures/minio-fixture/build.gradle +++ b/test/fixtures/minio-fixture/build.gradle @@ -30,4 +30,3 @@ apply plugin: 'opensearch.test.fixtures' description = 'Fixture for MinIO Storage service' - diff --git a/test/framework/src/main/resources/mockito-extensions/org.mockito.plugins.MockMaker b/test/framework/src/main/resources/mockito-extensions/org.mockito.plugins.MockMaker index e1795b7b9b3d6..99b0d419fc445 100644 --- a/test/framework/src/main/resources/mockito-extensions/org.mockito.plugins.MockMaker +++ b/test/framework/src/main/resources/mockito-extensions/org.mockito.plugins.MockMaker @@ -1 +1 @@ -org.opensearch.mockito.plugin.PriviledgedMockMaker \ No newline at end of file 
+org.opensearch.mockito.plugin.PriviledgedMockMaker diff --git a/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml index 5e08112253ef0..0d27f91a83dd3 100644 --- a/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml +++ b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml @@ -18,4 +18,3 @@ - match: { _type: test } - match: { _id: '1' } - match: { _source: { foo: "bar" } } - From c4b684d3ae0f56786f0b2c40b48d4329bd595a24 Mon Sep 17 00:00:00 2001 From: Poojita Raj Date: Wed, 13 Apr 2022 14:00:43 -0700 Subject: [PATCH 06/41] Add functionality to fast forward local processed checkpoints [segment replication] (#2576) (#2883) * fix local processed checkpoint update (#2576) Signed-off-by: Poojita Raj * separated tests + wrapper function Signed-off-by: Poojita Raj * moved tests + compareAndSet change Signed-off-by: Poojita Raj --- .../index/seqno/LocalCheckpointTracker.java | 28 +++++++++- .../seqno/LocalCheckpointTrackerTests.java | 56 +++++++++++++++++++ 2 files changed, 83 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java b/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java index 7aab597f8816c..8e2d81d0fe711 100644 --- a/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java +++ b/server/src/main/java/org/opensearch/index/seqno/LocalCheckpointTracker.java @@ -33,6 +33,7 @@ package org.opensearch.index.seqno; import com.carrotsearch.hppc.LongObjectHashMap; +import org.opensearch.common.Nullable; import org.opensearch.common.SuppressForbidden; import java.util.concurrent.atomic.AtomicLong; @@ -116,6 +117,13 @@ public void advanceMaxSeqNo(final long seqNo) { nextSeqNo.accumulateAndGet(seqNo + 1, Math::max); } + /** + * Checks that the sequence number is in an acceptable range for an update to take place. + */ + private boolean shouldUpdateSeqNo(final long seqNo, final long lowerBound, @Nullable final AtomicLong upperBound) { + return !((seqNo <= lowerBound) || (upperBound != null && seqNo > upperBound.get())); + } + /** * Marks the provided sequence number as processed and updates the processed checkpoint if possible. * @@ -134,11 +142,29 @@ public synchronized void markSeqNoAsPersisted(final long seqNo) { markSeqNo(seqNo, persistedCheckpoint, persistedSeqNo); } + /** + * Updates the processed sequence checkpoint to the given value. + * + * This method is only used for segment replication since indexing doesn't + * take place on the replica allowing us to avoid the check that all sequence numbers + * are consecutively processed. 
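+ * + * A minimal usage sketch (the call site is hypothetical; only the tracker methods are from this change, and the persisted checkpoint must already cover the target sequence number for the fast forward to apply): + * <pre> + * tracker.markSeqNoAsPersisted(checkpointSeqNo); // ops up to checkpointSeqNo are durable via the copied segments + * tracker.fastForwardProcessedSeqNo(checkpointSeqNo); // processed checkpoint jumps forward without per-op marking + * </pre>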
+ * + * @param seqNo the sequence number to mark as processed + */ + public synchronized void fastForwardProcessedSeqNo(final long seqNo) { + advanceMaxSeqNo(seqNo); + final long currentProcessedCheckpoint = processedCheckpoint.get(); + if (shouldUpdateSeqNo(seqNo, currentProcessedCheckpoint, persistedCheckpoint) == false) { + return; + } + processedCheckpoint.compareAndSet(currentProcessedCheckpoint, seqNo); + } + private void markSeqNo(final long seqNo, final AtomicLong checkPoint, final LongObjectHashMap bitSetMap) { assert Thread.holdsLock(this); // make sure we track highest seen sequence number advanceMaxSeqNo(seqNo); - if (seqNo <= checkPoint.get()) { + if (shouldUpdateSeqNo(seqNo, checkPoint.get(), null) == false) { // this is possible during recovery where we might replay an operation that was also replicated return; } diff --git a/server/src/test/java/org/opensearch/index/seqno/LocalCheckpointTrackerTests.java b/server/src/test/java/org/opensearch/index/seqno/LocalCheckpointTrackerTests.java index bcb178e05065c..237066e549b09 100644 --- a/server/src/test/java/org/opensearch/index/seqno/LocalCheckpointTrackerTests.java +++ b/server/src/test/java/org/opensearch/index/seqno/LocalCheckpointTrackerTests.java @@ -331,4 +331,60 @@ public void testContains() { final long seqNo = randomNonNegativeLong(); assertThat(tracker.hasProcessed(seqNo), equalTo(seqNo <= localCheckpoint || seqNos.contains(seqNo))); } + + public void testFastForwardProcessedNoPersistentUpdate() { + // base case with no persistent checkpoint update + long seqNo1; + assertThat(tracker.getProcessedCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); + seqNo1 = tracker.generateSeqNo(); + assertThat(seqNo1, equalTo(0L)); + tracker.fastForwardProcessedSeqNo(seqNo1); + assertThat(tracker.getProcessedCheckpoint(), equalTo(-1L)); + } + + public void testFastForwardProcessedPersistentUpdate() { + // base case with persistent checkpoint update + long seqNo1; + assertThat(tracker.getProcessedCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); + seqNo1 = tracker.generateSeqNo(); + assertThat(seqNo1, equalTo(0L)); + + tracker.markSeqNoAsPersisted(seqNo1); + assertThat(tracker.getPersistedCheckpoint(), equalTo(0L)); + tracker.fastForwardProcessedSeqNo(seqNo1); + assertThat(tracker.getProcessedCheckpoint(), equalTo(0L)); + assertThat(tracker.hasProcessed(0L), equalTo(true)); + assertThat(tracker.hasProcessed(atLeast(1)), equalTo(false)); + + // idempotent case + tracker.fastForwardProcessedSeqNo(seqNo1); + assertThat(tracker.getProcessedCheckpoint(), equalTo(0L)); + assertThat(tracker.hasProcessed(0L), equalTo(true)); + assertThat(tracker.hasProcessed(atLeast(1)), equalTo(false)); + + } + + public void testFastForwardProcessedPersistentUpdate2() { + long seqNo1, seqNo2; + assertThat(tracker.getProcessedCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); + seqNo1 = tracker.generateSeqNo(); + seqNo2 = tracker.generateSeqNo(); + assertThat(seqNo1, equalTo(0L)); + assertThat(seqNo2, equalTo(1L)); + tracker.markSeqNoAsPersisted(seqNo1); + tracker.markSeqNoAsPersisted(seqNo2); + assertThat(tracker.getProcessedCheckpoint(), equalTo(-1L)); + assertThat(tracker.getPersistedCheckpoint(), equalTo(1L)); + + tracker.fastForwardProcessedSeqNo(seqNo2); + assertThat(tracker.getProcessedCheckpoint(), equalTo(1L)); + assertThat(tracker.hasProcessed(seqNo1), equalTo(true)); + assertThat(tracker.hasProcessed(seqNo2), equalTo(true)); + + tracker.fastForwardProcessedSeqNo(seqNo1); + assertThat(tracker.getProcessedCheckpoint(), 
equalTo(1L)); + assertThat(tracker.hasProcessed(between(0, 1)), equalTo(true)); + assertThat(tracker.hasProcessed(atLeast(2)), equalTo(false)); + assertThat(tracker.getMaxSeqNo(), equalTo(1L)); + } } From 452e368bdeb623f4d6b72631069a4fe5264177c6 Mon Sep 17 00:00:00 2001 From: Vacha Shah Date: Thu, 14 Apr 2022 11:07:22 -0700 Subject: [PATCH 07/41] Adding a null pointer check to fix index_prefix query (#2879) * Adding a null pointer check to fix index_prefix query Signed-off-by: Vacha Shah * Adding test Signed-off-by: Vacha Shah --- .../org/opensearch/index/mapper/TextFieldMapper.java | 4 +++- .../opensearch/index/mapper/TextFieldTypeTests.java | 11 +++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java index 049c85dc910ed..4b2c20586834d 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java @@ -583,7 +583,9 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = new AutomatonQuery(new Term(name(), value + "*"), automaton); - query.setRewriteMethod(method); + if (method != null) { + query.setRewriteMethod(method); + } return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java index 18b04c1be5668..b9ec5a07b207d 100644 --- a/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/TextFieldTypeTests.java @@ -190,6 +190,17 @@ public void testIndexPrefixes() { ); assertThat(q, equalTo(expected)); + + q = ft.prefixQuery("g", null, false, randomMockShardContext()); + automaton = Operations.concatenate(Arrays.asList(Automata.makeChar('g'), Automata.makeAnyChar())); + + expected = new ConstantScoreQuery( + new BooleanQuery.Builder().add(new AutomatonQuery(new Term("field._index_prefix", "g*"), automaton), BooleanClause.Occur.SHOULD) + .add(new TermQuery(new Term("field", "g")), BooleanClause.Occur.SHOULD) + .build() + ); + + assertThat(q, equalTo(expected)); } public void testFetchSourceValue() throws IOException { From d39c18f7fe77d886a00672613ce50119821030d1 Mon Sep 17 00:00:00 2001 From: Ankit Jain Date: Fri, 15 Apr 2022 05:06:01 +0530 Subject: [PATCH 08/41] Excluding system indices from max shard limit validator (#2894) * Excluding system indices from max shard limit validator Signed-off-by: Ankit Jain * Fixing spotless check violations Signed-off-by: Ankit Jain * Fixing NPE due to null isHidden Signed-off-by: Ankit Jain * Adding unit tests for shard opening scenario Signed-off-by: Ankit Jain * Addressing review comments Signed-off-by: Ankit Jain --- .../metadata/MetadataCreateIndexService.java | 17 +-- .../indices/ShardLimitValidator.java | 14 ++- .../org/opensearch/indices/SystemIndices.java | 25 +++++ .../main/java/org/opensearch/node/Node.java | 2 +- .../opensearch/snapshots/RestoreService.java | 6 +- .../MetadataRolloverServiceTests.java | 10 +- .../indices/ShardLimitValidatorTests.java | 106 +++++++++++++++++- .../indices/cluster/ClusterStateChanges.java | 5 +- 
.../snapshots/SnapshotResiliencyTests.java | 5 +- 9 files changed, 160 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index 64198dce89cef..7f2be879f3637 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -88,7 +88,6 @@ import org.opensearch.indices.IndicesService; import org.opensearch.indices.InvalidIndexNameException; import org.opensearch.indices.ShardLimitValidator; -import org.opensearch.indices.SystemIndexDescriptor; import org.opensearch.indices.SystemIndices; import org.opensearch.threadpool.ThreadPool; @@ -214,17 +213,9 @@ public void validateIndexName(String index, ClusterState state) { * @param isHidden Whether or not this is a hidden index */ public boolean validateDotIndex(String index, @Nullable Boolean isHidden) { - boolean isSystem = false; if (index.charAt(0) == '.') { - SystemIndexDescriptor matchingDescriptor = systemIndices.findMatchingDescriptor(index); - if (matchingDescriptor != null) { - logger.trace( - "index [{}] is a system index because it matches index pattern [{}] with description [{}]", - index, - matchingDescriptor.getIndexPattern(), - matchingDescriptor.getDescription() - ); - isSystem = true; + if (systemIndices.validateSystemIndex(index)) { + return true; } else if (isHidden) { logger.trace("index [{}] is a hidden index", index); } else { @@ -237,7 +228,7 @@ public boolean validateDotIndex(String index, @Nullable Boolean isHidden) { } } - return isSystem; + return false; } /** @@ -884,7 +875,7 @@ static Settings aggregateIndexSettings( * We can not validate settings until we have applied templates, otherwise we do not know the actual settings * that will be used to create this index. */ - shardLimitValidator.validateShardLimit(indexSettings, currentState); + shardLimitValidator.validateShardLimit(request.index(), indexSettings, currentState); if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(indexSettings) == false && IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(indexSettings).onOrAfter(Version.V_2_0_0)) { throw new IllegalArgumentException( diff --git a/server/src/main/java/org/opensearch/indices/ShardLimitValidator.java b/server/src/main/java/org/opensearch/indices/ShardLimitValidator.java index 3ed0dbee59e71..7e4376e8ea8de 100644 --- a/server/src/main/java/org/opensearch/indices/ShardLimitValidator.java +++ b/server/src/main/java/org/opensearch/indices/ShardLimitValidator.java @@ -63,10 +63,12 @@ public class ShardLimitValidator { Setting.Property.NodeScope ); protected final AtomicInteger shardLimitPerNode = new AtomicInteger(); + private final SystemIndices systemIndices; - public ShardLimitValidator(final Settings settings, ClusterService clusterService) { + public ShardLimitValidator(final Settings settings, ClusterService clusterService, SystemIndices systemIndices) { this.shardLimitPerNode.set(SETTING_CLUSTER_MAX_SHARDS_PER_NODE.get(settings)); clusterService.getClusterSettings().addSettingsUpdateConsumer(SETTING_CLUSTER_MAX_SHARDS_PER_NODE, this::setShardLimitPerNode); + this.systemIndices = systemIndices; } private void setShardLimitPerNode(int newValue) { @@ -84,11 +86,17 @@ public int getShardLimitPerNode() { /** * Checks whether an index can be created without going over the cluster shard limit. 
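+ * For example, under the arithmetic used below, an index with 5 primary shards and 1 replica counts as {@code 5 * (1 + 1) = 10} shards against the limit of max shards per node times the number of data nodes.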
* + * @param indexName the name of the index being created * @param settings the settings of the index to be created * @param state the current cluster state * @throws ValidationException if creating this index would put the cluster over the cluster shard limit */ - public void validateShardLimit(final Settings settings, final ClusterState state) { + public void validateShardLimit(final String indexName, final Settings settings, final ClusterState state) { + // Validate the shard limit only for non-system indices, since the limit is not a hard limit for them anyway + if (systemIndices.validateSystemIndex(indexName)) { + return; + } + final int numberOfShards = INDEX_NUMBER_OF_SHARDS_SETTING.get(settings); final int numberOfReplicas = IndexMetadata.INDEX_NUMBER_OF_REPLICAS_SETTING.get(settings); final int shardsToCreate = numberOfShards * (1 + numberOfReplicas); @@ -111,6 +119,8 @@ public void validateShardLimit(ClusterState currentState, Index[] indicesToOpen) */ public void validateShardLimit(ClusterState currentState, Index[] indicesToOpen) { int shardsToOpen = Arrays.stream(indicesToOpen) + // Validate the shard limit only for non-system indices, since the limit is not a hard limit for them anyway + .filter(index -> !systemIndices.validateSystemIndex(index.getName())) .filter(index -> currentState.metadata().index(index).getState().equals(IndexMetadata.State.CLOSE)) .mapToInt(index -> getTotalShardCount(currentState, index)) .sum(); diff --git a/server/src/main/java/org/opensearch/indices/SystemIndices.java b/server/src/main/java/org/opensearch/indices/SystemIndices.java index fc34645b4326f..042291554670c 100644 --- a/server/src/main/java/org/opensearch/indices/SystemIndices.java +++ b/server/src/main/java/org/opensearch/indices/SystemIndices.java @@ -32,6 +32,8 @@ package org.opensearch.indices; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; @@ -63,6 +65,8 @@ * to reduce the locations within the code that need to deal with {@link SystemIndexDescriptor}s. */ public class SystemIndices { + private static final Logger logger = LogManager.getLogger(SystemIndices.class); + private static final Map<String, Collection<SystemIndexDescriptor>> SERVER_SYSTEM_INDEX_DESCRIPTORS = singletonMap( TaskResultsService.class.getName(), singletonList(new SystemIndexDescriptor(TASK_INDEX + "*", "Task Result Index")) @@ -135,6 +139,27 @@ public boolean isSystemIndex(String indexName) { } } + /** + * Checks whether this index (if it has a dot-prefixed name) is a system index.
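+ * For example, {@code validateSystemIndex(".tasks")} returns {@code true} because it matches the {@code TASK_INDEX + "*"} descriptor registered above, while a dot-prefixed name with no matching descriptor returns {@code false}.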
+ * @param index The name of the index in question + */ + public boolean validateSystemIndex(String index) { + if (index.charAt(0) == '.') { + SystemIndexDescriptor matchingDescriptor = findMatchingDescriptor(index); + if (matchingDescriptor != null) { + logger.trace( + "index [{}] is a system index because it matches index pattern [{}] with description [{}]", + index, + matchingDescriptor.getIndexPattern(), + matchingDescriptor.getDescription() + ); + return true; + } + } + + return false; + } + private static CharacterRunAutomaton buildCharacterRunAutomaton(Collection descriptors) { Optional automaton = descriptors.stream() .map(descriptor -> Regex.simpleMatchToAutomaton(descriptor.getIndexPattern())) diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 8ede6fdf76653..46400e5c8d269 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -635,7 +635,7 @@ protected Node( final AliasValidator aliasValidator = new AliasValidator(); - final ShardLimitValidator shardLimitValidator = new ShardLimitValidator(settings, clusterService); + final ShardLimitValidator shardLimitValidator = new ShardLimitValidator(settings, clusterService, systemIndices); final MetadataCreateIndexService metadataCreateIndexService = new MetadataCreateIndexService( settings, clusterService, diff --git a/server/src/main/java/org/opensearch/snapshots/RestoreService.java b/server/src/main/java/org/opensearch/snapshots/RestoreService.java index ad5cfe6e443ff..e1b143b5f5274 100644 --- a/server/src/main/java/org/opensearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/opensearch/snapshots/RestoreService.java @@ -384,7 +384,11 @@ public ClusterState execute(ClusterState currentState) { .put(snapshotIndexMetadata.getSettings()) .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) ); - shardLimitValidator.validateShardLimit(snapshotIndexMetadata.getSettings(), currentState); + shardLimitValidator.validateShardLimit( + renamedIndexName, + snapshotIndexMetadata.getSettings(), + currentState + ); if (!request.includeAliases() && !snapshotIndexMetadata.getAliases().isEmpty()) { // Remove all aliases - they shouldn't be restored indexMdBuilder.removeAllAliases(); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index fd052308ed87b..afe35538adaf5 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -603,7 +603,8 @@ public void testRolloverClusterState() throws Exception { IndexNameExpressionResolver mockIndexNameExpressionResolver = mock(IndexNameExpressionResolver.class); when(mockIndexNameExpressionResolver.resolveDateMathExpression(any())).then(returnsFirstArg()); - ShardLimitValidator shardLimitValidator = new ShardLimitValidator(Settings.EMPTY, clusterService); + final SystemIndices systemIndices = new SystemIndices(emptyMap()); + ShardLimitValidator shardLimitValidator = new ShardLimitValidator(Settings.EMPTY, clusterService, systemIndices); MetadataCreateIndexService createIndexService = new MetadataCreateIndexService( Settings.EMPTY, clusterService, @@ -615,7 +616,7 @@ public void testRolloverClusterState() throws Exception { 
IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, testThreadPool, null, - new SystemIndices(emptyMap()), + systemIndices, false ); MetadataIndexAliasesService indexAliasesService = new MetadataIndexAliasesService( @@ -739,7 +740,8 @@ public void testRolloverClusterStateForDataStream() throws Exception { IndexNameExpressionResolver mockIndexNameExpressionResolver = mock(IndexNameExpressionResolver.class); when(mockIndexNameExpressionResolver.resolveDateMathExpression(any())).then(returnsFirstArg()); - ShardLimitValidator shardLimitValidator = new ShardLimitValidator(Settings.EMPTY, clusterService); + final SystemIndices systemIndices = new SystemIndices(emptyMap()); + ShardLimitValidator shardLimitValidator = new ShardLimitValidator(Settings.EMPTY, clusterService, systemIndices); MetadataCreateIndexService createIndexService = new MetadataCreateIndexService( Settings.EMPTY, clusterService, @@ -751,7 +753,7 @@ public void testRolloverClusterStateForDataStream() throws Exception { IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, testThreadPool, null, - new SystemIndices(emptyMap()), + systemIndices, false ); MetadataIndexAliasesService indexAliasesService = new MetadataIndexAliasesService( diff --git a/server/src/test/java/org/opensearch/indices/ShardLimitValidatorTests.java b/server/src/test/java/org/opensearch/indices/ShardLimitValidatorTests.java index 7e9c971cae1e8..a61ca13df0215 100644 --- a/server/src/test/java/org/opensearch/indices/ShardLimitValidatorTests.java +++ b/server/src/test/java/org/opensearch/indices/ShardLimitValidatorTests.java @@ -52,6 +52,8 @@ import java.util.Optional; import java.util.stream.Collectors; +import static java.util.Collections.emptyMap; +import static org.opensearch.cluster.metadata.IndexMetadata.*; import static org.opensearch.cluster.metadata.MetadataIndexStateServiceTests.addClosedIndex; import static org.opensearch.cluster.metadata.MetadataIndexStateServiceTests.addOpenedIndex; import static org.opensearch.cluster.shards.ShardCounts.forDataNodeCount; @@ -104,7 +106,54 @@ public void testUnderShardLimit() { assertFalse(errorMessage.isPresent()); } - public void testValidateShardLimit() { + /** + * This test validates that system index creation succeeds + * even though it exceeds the cluster max shard limit + */ + public void testSystemIndexCreationSucceeds() { + final ShardLimitValidator shardLimitValidator = createTestShardLimitService(1); + final Settings settings = Settings.builder() + .put(SETTING_VERSION_CREATED, Version.CURRENT) + .put(SETTING_NUMBER_OF_SHARDS, 1) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + final ClusterState state = createClusterForShardLimitTest(1, 1, 0); + shardLimitValidator.validateShardLimit(".tasks", settings, state); + } + + /** + * This test validates that non-system index creation + * fails when it exceeds the cluster max shard limit + */ + public void testNonSystemIndexCreationFails() { + final ShardLimitValidator shardLimitValidator = createTestShardLimitService(1); + final Settings settings = Settings.builder() + .put(SETTING_VERSION_CREATED, Version.CURRENT) + .put(SETTING_NUMBER_OF_SHARDS, 1) + .put(SETTING_NUMBER_OF_REPLICAS, 1) + .build(); + final ClusterState state = createClusterForShardLimitTest(1, 1, 0); + final ValidationException exception = expectThrows( + ValidationException.class, + () -> shardLimitValidator.validateShardLimit("abc", settings, state) + ); + assertEquals( + "Validation Failed: 1: this action would add [" + + 2 + + "] total shards, but this cluster currently has [" + + 1 + + "]/[" + + 1 + + 
"] maximum shards open;", + exception.getMessage() + ); + } + + /** + * This test validates that non-system index opening + * fails when it exceeds the cluster max shard limit + */ + public void testNonSystemIndexOpeningFails() { int nodesInCluster = randomIntBetween(2, 90); ShardCounts counts = forDataNodeCount(nodesInCluster); ClusterState state = createClusterForShardLimitTest( @@ -140,6 +189,33 @@ public void testValidateShardLimit() { ); } + /** + * This test validates that system index opening succeeds + * even when it exceeds the cluster max shard limit + */ + public void testSystemIndexOpeningSucceeds() { + int nodesInCluster = randomIntBetween(2, 90); + ShardCounts counts = forDataNodeCount(nodesInCluster); + ClusterState state = createClusterForShardLimitTest( + nodesInCluster, + randomAlphaOfLengthBetween(5, 15), + counts.getFirstIndexShards(), + counts.getFirstIndexReplicas(), + ".tasks", // Adding closed system index to cluster state + counts.getFailingIndexShards(), + counts.getFailingIndexReplicas() + ); + + Index[] indices = Arrays.stream(state.metadata().indices().values().toArray(IndexMetadata.class)) + .map(IndexMetadata::getIndex) + .collect(Collectors.toList()) + .toArray(new Index[2]); + + // Shard limit validation succeeds without any issues as system index is being opened + ShardLimitValidator shardLimitValidator = createTestShardLimitService(counts.getShardsPerNode()); + shardLimitValidator.validateShardLimit(state, indices); + } + public static ClusterState createClusterForShardLimitTest(int nodesInCluster, int shardsInIndex, int replicas) { ImmutableOpenMap.Builder dataNodes = ImmutableOpenMap.builder(); for (int i = 0; i < nodesInCluster; i++) { @@ -165,8 +241,10 @@ public static ClusterState createClusterForShardLimitTest(int nodesInCluster, in public static ClusterState createClusterForShardLimitTest( int nodesInCluster, + String openIndexName, int openIndexShards, int openIndexReplicas, + String closeIndexName, int closedIndexShards, int closedIndexReplicas ) { @@ -178,8 +256,8 @@ public static ClusterState createClusterForShardLimitTest( when(nodes.getDataNodes()).thenReturn(dataNodes.build()); ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build(); - state = addOpenedIndex(randomAlphaOfLengthBetween(5, 15), openIndexShards, openIndexReplicas, state); - state = addClosedIndex(randomAlphaOfLengthBetween(5, 15), closedIndexShards, closedIndexReplicas, state); + state = addOpenedIndex(openIndexName, openIndexShards, openIndexReplicas, state); + state = addClosedIndex(closeIndexName, closedIndexShards, closedIndexReplicas, state); final Metadata.Builder metadata = Metadata.builder(state.metadata()); if (randomBoolean()) { @@ -190,6 +268,24 @@ public static ClusterState createClusterForShardLimitTest( return ClusterState.builder(state).metadata(metadata).nodes(nodes).build(); } + public static ClusterState createClusterForShardLimitTest( + int nodesInCluster, + int openIndexShards, + int openIndexReplicas, + int closedIndexShards, + int closedIndexReplicas + ) { + return createClusterForShardLimitTest( + nodesInCluster, + randomAlphaOfLengthBetween(5, 15), + openIndexShards, + openIndexReplicas, + randomAlphaOfLengthBetween(5, 15), + closedIndexShards, + closedIndexReplicas + ); + } + /** * Creates a {@link ShardLimitValidator} for testing with the given setting and a mocked cluster service. 
* @@ -204,7 +300,7 @@ public static ShardLimitValidator createTestShardLimitService(int maxShardsPerNo new ClusterSettings(limitOnlySettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) ); - return new ShardLimitValidator(limitOnlySettings, clusterService); + return new ShardLimitValidator(limitOnlySettings, clusterService, new SystemIndices(emptyMap())); } /** @@ -217,6 +313,6 @@ public static ShardLimitValidator createTestShardLimitService(int maxShardsPerNo public static ShardLimitValidator createTestShardLimitService(int maxShardsPerNode, ClusterService clusterService) { Settings limitOnlySettings = Settings.builder().put(SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), maxShardsPerNode).build(); - return new ShardLimitValidator(limitOnlySettings, clusterService); + return new ShardLimitValidator(limitOnlySettings, clusterService, new SystemIndices(emptyMap())); } } diff --git a/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java index 99ec043cc7801..a7d9ba0bf3d4b 100644 --- a/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java @@ -259,7 +259,8 @@ public IndexMetadata upgradeIndexMetadata(IndexMetadata indexMetadata, Version m null, actionFilters ); - ShardLimitValidator shardLimitValidator = new ShardLimitValidator(SETTINGS, clusterService); + final SystemIndices systemIndices = new SystemIndices(emptyMap()); + ShardLimitValidator shardLimitValidator = new ShardLimitValidator(SETTINGS, clusterService, systemIndices); MetadataIndexStateService indexStateService = new MetadataIndexStateService( clusterService, allocationService, @@ -290,7 +291,7 @@ public IndexMetadata upgradeIndexMetadata(IndexMetadata indexMetadata, Version m IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, threadPool, xContentRegistry, - new SystemIndices(emptyMap()), + systemIndices, true ); diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 26e19e532b6bc..a896aab0f70c9 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -1863,7 +1863,8 @@ public void onFailure(final Exception e) { RetentionLeaseSyncer.EMPTY ); Map actions = new HashMap<>(); - final ShardLimitValidator shardLimitValidator = new ShardLimitValidator(settings, clusterService); + final SystemIndices systemIndices = new SystemIndices(emptyMap()); + final ShardLimitValidator shardLimitValidator = new ShardLimitValidator(settings, clusterService, systemIndices); final MetadataCreateIndexService metadataCreateIndexService = new MetadataCreateIndexService( settings, clusterService, @@ -1875,7 +1876,7 @@ public void onFailure(final Exception e) { indexScopedSettings, threadPool, namedXContentRegistry, - new SystemIndices(emptyMap()), + systemIndices, false ); actions.put( From a6b4967dcf39b7ff3d47aff0a75314becbce72f4 Mon Sep 17 00:00:00 2001 From: "Daniel Doubrovkine (dB.)" Date: Fri, 15 Apr 2022 11:10:42 -0400 Subject: [PATCH 09/41] Adding @reta to OpenSearch maintainers. (#2905) * Adding @reta to OpenSearch maintainers. 
Signed-off-by: dblock * Update Andrew Ross github id Signed-off-by: Nicholas Walter Knize Co-authored-by: Nick Knize --- MAINTAINERS.md | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index db6cd6c0f3309..7aa77cad0e713 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -3,27 +3,27 @@ | Maintainer | GitHub ID | Affiliation | | --------------- | --------- | ----------- | | Abbas Hussain | [abbashus](https://github.com/abbashus) | Amazon | +| Anas Alkouz | [anasalkouz](https://github.com/anasalkouz) | Amazon | +| Andrew Ross | [andrross](https://github.com/andrross)| Amazon | +| Andriy Redko | [reta](https://github.com/reta) | Aiven | | Charlotte Henkle | [CEHENKLE](https://github.com/CEHENKLE) | Amazon | +| Daniel "dB." Doubrovkine | [dblock](https://github.com/dblock) | Amazon | +| Gopala Krishna Ambareesh | [krishna-ggk](https://github.com/krishna-ggk) |Amazon | | Himanshu Setia | [setiah](https://github.com/setiah) | Amazon | +| Itiyama Sadana | [itiyamas](https://github.com/itiyamas) | Amazon | +| Kartik Ganesh | [kartg](https://github.com/kartg) | Amazon | +| Marc Handalian | [mch2](https://github.com/mch2) | Amazon | +| Megha Sai Kavikondala | [meghasaik](https://github.com/meghasaik) | Amazon | | Nick Knize | [nknize](https://github.com/nknize) | Amazon | +| Owais Kazi | [owaiskazi19](https://github.com/owaiskazi19) | Amazon | | Rabi Panda | [adnapibar](https://github.com/adnapibar) | Amazon | +| Rishikesh Pasham | [Rishikesh1159](https://github.com/Rishikesh1159) | Amazon| +| Ryan Bogan | [ryanbogan](https://github.com/ryanbogan) | Amazon | | Sarat Vemulapalli | [saratvemulapalli](https://github.com/saratvemulapalli) | Amazon | -| Tianli Feng | [tlfeng](https://github.com/tlfeng) | Amazon | -| Gopala Krishna Ambareesh | [krishna-ggk](https://github.com/krishna-ggk) |Amazon | -| Vengadanathan Srinivasan | [vengadanathan-s](https://github.com/vengadanathan-s) | Amazon | | Shweta Thareja |[shwetathareja](https://github.com/shwetathareja) | Amazon | -| Itiyama Sadana | [itiyamas](https://github.com/itiyamas) | Amazon | -| Daniel "dB." Doubrovkine | [dblock](https://github.com/dblock) | Amazon | -| Andrew Ross | [andross](https://github.com/andrross)| Amazon | +| Tianli Feng | [tlfeng](https://github.com/tlfeng) | Amazon | | Vacha Shah | [VachaShah](https://github.com/VachaShah) | Amazon | -| Anas Alkouz | [anasalkouz](https://github.com/anasalkouz) | Amazon | -| Megha Sai Kavikondala | [meghasaik](https://github.com/meghasaik) | Amazon | -| Rishikesh Pasham | [Rishikesh1159](https://github.com/Rishikesh1159) | Amazon| +| Vengadanathan Srinivasan | [vengadanathan-s](https://github.com/vengadanathan-s) | Amazon | | Xue Zhou | [xuezhou25](https://github.com/xuezhou25) | Amazon | -| Kartik Ganesh | [kartg](https://github.com/kartg) | Amazon | -| Marc Handalian | [mch2](https://github.com/mch2) | Amazon | -| Ryan Bogan | [ryanbogan](https://github.com/ryanbogan) | Amazon | -| Owais Kazi | [owaiskazi19](https://github.com/owaiskazi19) | Amazon | - [This document](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md) explains what maintainers do in this repo, and how they should be doing it. If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md). From ca9151fa24222ff33ca0cc412b5c1ba8b9c65158 Mon Sep 17 00:00:00 2001 From: Marc Handalian Date: Fri, 15 Apr 2022 13:09:33 -0700 Subject: [PATCH 10/41] Add Github Workflow to build and publish lucene snapshots. 
(#2906) This change introduces a GitHub workflow so that we can build and push snapshots of Lucene. The RepositoriesSetupPlugin is also updated with a URL from which these snapshots can be retrieved. Signed-off-by: Marc Handalian --- .github/workflows/lucene-snapshots.yml | 55 +++++++++++++++++++ DEVELOPER_GUIDE.md | 6 ++ .../gradle/RepositoriesSetupPlugin.java | 3 +- 3 files changed, 62 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/lucene-snapshots.yml diff --git a/.github/workflows/lucene-snapshots.yml b/.github/workflows/lucene-snapshots.yml new file mode 100644 index 0000000000000..0fe025ad1aa16 --- /dev/null +++ b/.github/workflows/lucene-snapshots.yml @@ -0,0 +1,55 @@ +# This workflow will check out, build, and publish snapshots of lucene. + +name: OpenSearch Lucene snapshots + +on: + workflow_dispatch: + # Inputs the workflow accepts. + inputs: + ref: + description: + required: false + default: 'main' + +jobs: + publish-snapshots: + runs-on: ubuntu-latest + # These permissions are needed to interact with GitHub's OIDC Token endpoint. + permissions: + id-token: write + contents: read + + steps: + - uses: actions/checkout@v2 + - name: Set up JDK 17 + uses: actions/setup-java@v2 + with: + java-version: '17' + distribution: 'adopt' + + - name: Checkout Lucene + uses: actions/checkout@v2 + with: + repository: 'apache/lucene' + path: lucene + ref: ${{ github.event.inputs.ref }} + + - name: Set hash + working-directory: ./lucene + run: | + echo "::set-output name=REVISION::$(git rev-parse --short HEAD)" + id: version + + - name: Publish Lucene to local maven repo. + working-directory: ./lucene + run: ./gradlew publishJarsPublicationToMavenLocal -Pversion.suffix=snapshot-${{ steps.version.outputs.REVISION }} + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + role-to-assume: ${{ secrets.LUCENE_SNAPSHOTS_ROLE }} + aws-region: us-west-2 + + - name: Copy files to S3 with the aws CLI. + run: | + aws s3 cp ~/.m2/repository/org/apache/lucene/ s3://${{ secrets.LUCENE_SNAPSHOTS_BUCKET }}/snapshots/lucene/org/apache/lucene/ --recursive --no-progress diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 11fcb324c8cae..70abfda767353 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -49,6 +49,7 @@ - [Submitting Changes](#submitting-changes) - [Backports](#backports) - [LineLint](#linelint) + - [Lucene Snapshots](#lucene-snapshots) # Developer Guide @@ -488,3 +489,8 @@ Executing the binary will automatically search the local directory tree for lint Pass a list of files or directories to limit your search. linelint README.md LICENSE + +# Lucene Snapshots +The GitHub workflow in [lucene-snapshots.yml](.github/workflows/lucene-snapshots.yml) is a workflow that maintainers can run to build and publish a snapshot build of Lucene. +These snapshots can be used to test compatibility with upcoming changes to Lucene by updating the Lucene version in [version.properties](buildSrc/version.properties) to a `version-snapshot-sha` value. +Example: `lucene = 10.0.0-snapshot-2e941fc`.
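The RepositoriesSetupPlugin change below keys off the `-snapshot-` suffix in the Lucene version string. As a rough sketch of that flow (the regex and class here are illustrative assumptions; only the revision extraction via `matcher.group(1)`, the "Malformed lucene snapshot version" error, and the repository URL appear in the actual plugin code below):

    // Illustrative sketch only: derive the snapshot revision from a version
    // string such as the DEVELOPER_GUIDE.md example above, then point at the
    // snapshot repository that serves the published artifacts.
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    class LuceneSnapshotVersionSketch {
        public static void main(String[] args) {
            String luceneVersion = "10.0.0-snapshot-2e941fc";
            // Assumed pattern; the plugin's real matcher is outside this hunk.
            Matcher matcher = Pattern.compile("-snapshot-([0-9a-f]+)$").matcher(luceneVersion);
            if (matcher.find() == false) {
                throw new IllegalStateException("Malformed lucene snapshot version: " + luceneVersion);
            }
            String revision = matcher.group(1); // "2e941fc"
            System.out.println("lucene-snapshots: https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/ revision " + revision);
        }
    }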
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java index 30847f0648c5c..63b88f671c84c 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/RepositoriesSetupPlugin.java @@ -92,10 +92,9 @@ public static void configureRepositories(Project project) { throw new GradleException("Malformed lucene snapshot version: " + luceneVersion); } String revision = matcher.group(1); - // TODO(cleanup) - Setup own lucene snapshot repo MavenArtifactRepository luceneRepo = repos.maven(repo -> { repo.setName("lucene-snapshots"); - repo.setUrl("https://artifacts.opensearch.org/snapshots/lucene/"); + repo.setUrl("https://d1nvenhzbhpy0q.cloudfront.net/snapshots/lucene/"); }); repos.exclusiveContent(exclusiveRepo -> { exclusiveRepo.filter( From 1eda2bbe3ad5d60d10c60d5e4fd1e8711198b4c4 Mon Sep 17 00:00:00 2001 From: Sarat Vemulapalli Date: Fri, 15 Apr 2022 16:07:10 -0700 Subject: [PATCH 11/41] Fixing Scaled float field mapper to respect ignoreMalformed setting (#2918) * Fixing Scaled float field mapper to respect ignoreMalformed setting Signed-off-by: Sarat Vemulapalli * Adding unit tests Signed-off-by: Sarat Vemulapalli --- .../org/opensearch/index/mapper/ScaledFloatFieldMapper.java | 3 ++- .../opensearch/index/mapper/ScaledFloatFieldMapperTests.java | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java index 78a9e389eb63f..73ce1cf96d7d0 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/ScaledFloatFieldMapper.java @@ -32,6 +32,7 @@ package org.opensearch.index.mapper; +import com.fasterxml.jackson.core.JsonParseException; import org.apache.lucene.document.Field; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; @@ -392,7 +393,7 @@ protected void parseCreateField(ParseContext context) throws IOException { } else { try { numericValue = parse(parser, coerce.value()); - } catch (IllegalArgumentException e) { + } catch (IllegalArgumentException | JsonParseException e) { if (ignoreMalformed.value()) { return; } else { diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java index 3de322b286183..e19f9dd7988e1 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/ScaledFloatFieldMapperTests.java @@ -229,6 +229,7 @@ public void testCoerce() throws Exception { public void testIgnoreMalformed() throws Exception { doTestIgnoreMalformed("a", "For input string: \"a\""); + doTestIgnoreMalformed(true, "Current token (VALUE_TRUE) not numeric"); List values = Arrays.asList("NaN", "Infinity", "-Infinity"); for (String value : values) { @@ -236,7 +237,7 @@ public void testIgnoreMalformed() throws Exception { } } - private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception { + private void doTestIgnoreMalformed(Object value, String exceptionMessageContains) throws 
Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(this::minimalMapping)); ThrowingRunnable runnable = () -> mapper.parse( new SourceToParse( From eba19351786944f6d9b3285c257c1637d18e5867 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Apr 2022 10:06:02 -0700 Subject: [PATCH 12/41] Bump org.gradle.test-retry from 1.3.1 to 1.3.2 (#2940) Bumps org.gradle.test-retry from 1.3.1 to 1.3.2. --- updated-dependencies: - dependency-name: org.gradle.test-retry dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index a41ad61de39a6..7949872a85b86 100644 --- a/build.gradle +++ b/build.gradle @@ -49,7 +49,7 @@ plugins { id 'opensearch.docker-support' id 'opensearch.global-build-info' id "com.diffplug.spotless" version "6.4.2" apply false - id "org.gradle.test-retry" version "1.3.1" apply false + id "org.gradle.test-retry" version "1.3.2" apply false } apply from: 'gradle/build-complete.gradle' From fc378c7256bc1e46f4c1cbee1ac6360624e1e574 Mon Sep 17 00:00:00 2001 From: Vacha Shah Date: Mon, 18 Apr 2022 10:07:14 -0700 Subject: [PATCH 13/41] Adding asm to version file and upgrading (#2933) Signed-off-by: Vacha Shah --- buildSrc/version.properties | 1 + modules/lang-expression/build.gradle | 6 +++--- .../lang-expression/licenses/asm-9.2.jar.sha1 | 1 - .../lang-expression/licenses/asm-9.3.jar.sha1 | 1 + .../licenses/asm-commons-9.2.jar.sha1 | 1 - .../licenses/asm-commons-9.3.jar.sha1 | 1 + .../licenses/asm-tree-9.2.jar.sha1 | 1 - .../licenses/asm-tree-9.3.jar.sha1 | 1 + modules/lang-painless/build.gradle | 20 +++++++++---------- .../lang-painless/licenses/asm-9.2.jar.sha1 | 1 - .../lang-painless/licenses/asm-9.3.jar.sha1 | 1 + .../licenses/asm-analysis-9.2.jar.sha1 | 1 - .../licenses/asm-analysis-9.3.jar.sha1 | 1 + .../licenses/asm-commons-9.2.jar.sha1 | 1 - .../licenses/asm-commons-9.3.jar.sha1 | 1 + .../licenses/asm-tree-9.2.jar.sha1 | 1 - .../licenses/asm-tree-9.3.jar.sha1 | 1 + .../licenses/asm-util-9.2.jar.sha1 | 1 - .../licenses/asm-util-9.3.jar.sha1 | 1 + test/logger-usage/build.gradle | 6 +++--- 20 files changed, 25 insertions(+), 24 deletions(-) delete mode 100644 modules/lang-expression/licenses/asm-9.2.jar.sha1 create mode 100644 modules/lang-expression/licenses/asm-9.3.jar.sha1 delete mode 100644 modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 create mode 100644 modules/lang-expression/licenses/asm-commons-9.3.jar.sha1 delete mode 100644 modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 create mode 100644 modules/lang-expression/licenses/asm-tree-9.3.jar.sha1 delete mode 100644 modules/lang-painless/licenses/asm-9.2.jar.sha1 create mode 100644 modules/lang-painless/licenses/asm-9.3.jar.sha1 delete mode 100644 modules/lang-painless/licenses/asm-analysis-9.2.jar.sha1 create mode 100644 modules/lang-painless/licenses/asm-analysis-9.3.jar.sha1 delete mode 100644 modules/lang-painless/licenses/asm-commons-9.2.jar.sha1 create mode 100644 modules/lang-painless/licenses/asm-commons-9.3.jar.sha1 delete mode 100644 modules/lang-painless/licenses/asm-tree-9.2.jar.sha1 create mode 100644 modules/lang-painless/licenses/asm-tree-9.3.jar.sha1 delete mode 100644 modules/lang-painless/licenses/asm-util-9.2.jar.sha1 create mode 100644 
modules/lang-painless/licenses/asm-util-9.3.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 7ae3bfaa19b5a..d3499b0df599b 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -16,6 +16,7 @@ icu4j = 70.1 supercsv = 2.4.0 log4j = 2.17.1 slf4j = 1.6.2 +asm = 9.3 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 5.5.0 diff --git a/modules/lang-expression/build.gradle b/modules/lang-expression/build.gradle index 9d7b0e2f0979c..203c332069c5f 100644 --- a/modules/lang-expression/build.gradle +++ b/modules/lang-expression/build.gradle @@ -38,9 +38,9 @@ opensearchplugin { dependencies { api "org.apache.lucene:lucene-expressions:${versions.lucene}" api 'org.antlr:antlr4-runtime:4.9.3' - api 'org.ow2.asm:asm:9.2' - api 'org.ow2.asm:asm-commons:9.2' - api 'org.ow2.asm:asm-tree:9.2' + api "org.ow2.asm:asm:${versions.asm}" + api "org.ow2.asm:asm-commons:${versions.asm}" + api "org.ow2.asm:asm-tree:${versions.asm}" } restResources { restApi { diff --git a/modules/lang-expression/licenses/asm-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-9.2.jar.sha1 deleted file mode 100644 index 28f456d3cbcb2..0000000000000 --- a/modules/lang-expression/licenses/asm-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -81a03f76019c67362299c40e0ba13405f5467bff \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-9.3.jar.sha1 b/modules/lang-expression/licenses/asm-9.3.jar.sha1 new file mode 100644 index 0000000000000..71d3966a6f6f9 --- /dev/null +++ b/modules/lang-expression/licenses/asm-9.3.jar.sha1 @@ -0,0 +1 @@ +8e6300ef51c1d801a7ed62d07cd221aca3a90640 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 deleted file mode 100644 index 7beb3d29afe86..0000000000000 --- a/modules/lang-expression/licenses/asm-commons-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f4d7f0fc9054386f2893b602454d48e07d4fbead \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-commons-9.3.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.3.jar.sha1 new file mode 100644 index 0000000000000..fd7cd4943a57c --- /dev/null +++ b/modules/lang-expression/licenses/asm-commons-9.3.jar.sha1 @@ -0,0 +1 @@ +1f2a432d1212f5c352ae607d7b61dcae20c20af5 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 deleted file mode 100644 index 7b486521ecef3..0000000000000 --- a/modules/lang-expression/licenses/asm-tree-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d96c99a30f5e1a19b0e609dbb19a44d8518ac01e \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-9.3.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.3.jar.sha1 new file mode 100644 index 0000000000000..238f0006424d3 --- /dev/null +++ b/modules/lang-expression/licenses/asm-tree-9.3.jar.sha1 @@ -0,0 +1 @@ +78d2ecd61318b5a58cd04fb237636c0e86b77d97 \ No newline at end of file diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index 069158fb678ef..f8e25c20cbf15 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -49,11 +49,11 @@ testClusters.all { dependencies { api 'org.antlr:antlr4-runtime:4.9.3' - api 'org.ow2.asm:asm-util:9.2' - api 'org.ow2.asm:asm-tree:9.2' - api 'org.ow2.asm:asm-commons:9.2' - api 'org.ow2.asm:asm-analysis:9.2' - api 'org.ow2.asm:asm:9.2' + api 
"org.ow2.asm:asm-util:${versions.asm}" + api "org.ow2.asm:asm-tree:${versions.asm}" + api "org.ow2.asm:asm-commons:${versions.asm}" + api "org.ow2.asm:asm-analysis:${versions.asm}" + api "org.ow2.asm:asm:${versions.asm}" api project('spi') } @@ -69,11 +69,11 @@ shadowJar { classifier = null relocate 'org.objectweb', 'org.opensearch.repackage.org.objectweb' dependencies { - include(dependency('org.ow2.asm:asm:9.2')) - include(dependency('org.ow2.asm:asm-util:9.2')) - include(dependency('org.ow2.asm:asm-tree:9.2')) - include(dependency('org.ow2.asm:asm-commons:9.2')) - include(dependency('org.ow2.asm:asm-analysis:9.2')) + include(dependency("org.ow2.asm:asm:${versions.asm}")) + include(dependency("org.ow2.asm:asm-util:${versions.asm}")) + include(dependency("org.ow2.asm:asm-tree:${versions.asm}")) + include(dependency("org.ow2.asm:asm-commons:${versions.asm}")) + include(dependency("org.ow2.asm:asm-analysis:${versions.asm}")) } } diff --git a/modules/lang-painless/licenses/asm-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-9.2.jar.sha1 deleted file mode 100644 index 28f456d3cbcb2..0000000000000 --- a/modules/lang-painless/licenses/asm-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -81a03f76019c67362299c40e0ba13405f5467bff \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-9.3.jar.sha1 new file mode 100644 index 0000000000000..71d3966a6f6f9 --- /dev/null +++ b/modules/lang-painless/licenses/asm-9.3.jar.sha1 @@ -0,0 +1 @@ +8e6300ef51c1d801a7ed62d07cd221aca3a90640 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-analysis-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-9.2.jar.sha1 deleted file mode 100644 index b93483a24da5d..0000000000000 --- a/modules/lang-painless/licenses/asm-analysis-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7487dd756daf96cab9986e44b9d7bcb796a61c10 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-analysis-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-9.3.jar.sha1 new file mode 100644 index 0000000000000..f5a04d0196823 --- /dev/null +++ b/modules/lang-painless/licenses/asm-analysis-9.3.jar.sha1 @@ -0,0 +1 @@ +4b071f211b37c38e0e9f5998550197c8593f6ad8 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-commons-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-commons-9.2.jar.sha1 deleted file mode 100644 index 7beb3d29afe86..0000000000000 --- a/modules/lang-painless/licenses/asm-commons-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f4d7f0fc9054386f2893b602454d48e07d4fbead \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-commons-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-commons-9.3.jar.sha1 new file mode 100644 index 0000000000000..fd7cd4943a57c --- /dev/null +++ b/modules/lang-painless/licenses/asm-commons-9.3.jar.sha1 @@ -0,0 +1 @@ +1f2a432d1212f5c352ae607d7b61dcae20c20af5 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-tree-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-tree-9.2.jar.sha1 deleted file mode 100644 index 7b486521ecef3..0000000000000 --- a/modules/lang-painless/licenses/asm-tree-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d96c99a30f5e1a19b0e609dbb19a44d8518ac01e \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-tree-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-tree-9.3.jar.sha1 new file mode 100644 index 0000000000000..238f0006424d3 --- /dev/null +++ 
b/modules/lang-painless/licenses/asm-tree-9.3.jar.sha1 @@ -0,0 +1 @@ +78d2ecd61318b5a58cd04fb237636c0e86b77d97 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-util-9.2.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.2.jar.sha1 deleted file mode 100644 index 5cb89aa115f30..0000000000000 --- a/modules/lang-painless/licenses/asm-util-9.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fbc178fc5ba3dab50fd7e8a5317b8b647c8e8946 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-util-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.3.jar.sha1 new file mode 100644 index 0000000000000..8859c317794ba --- /dev/null +++ b/modules/lang-painless/licenses/asm-util-9.3.jar.sha1 @@ -0,0 +1 @@ +9595bc05510d0bd4b610188b77333fe4851a1975 \ No newline at end of file diff --git a/test/logger-usage/build.gradle b/test/logger-usage/build.gradle index 3154e556a87cf..e81cdef04df1f 100644 --- a/test/logger-usage/build.gradle +++ b/test/logger-usage/build.gradle @@ -31,9 +31,9 @@ apply plugin: 'opensearch.java' dependencies { - api 'org.ow2.asm:asm:9.2' - api 'org.ow2.asm:asm-tree:9.2' - api 'org.ow2.asm:asm-analysis:9.3' + api "org.ow2.asm:asm:${versions.asm}" + api "org.ow2.asm:asm-tree:${versions.asm}" + api "org.ow2.asm:asm-analysis:${versions.asm}" api "org.apache.logging.log4j:log4j-api:${versions.log4j}" testImplementation project(":test:framework") } From d61d170332be07b809cc982f0a5dc5a3d6148d77 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Mon, 18 Apr 2022 13:16:22 -0400 Subject: [PATCH 14/41] Removed java11 source folders since JDK-11 is the baseline now (#2898) Signed-off-by: Andriy Redko --- .../org/opensearch/common/collect/List.java | 34 +-- .../org/opensearch/common/collect/Map.java | 102 +++------ .../org/opensearch/common/collect/Set.java | 36 +--- .../opensearch/core/internal/io/Streams.java | 9 +- .../org/opensearch/common/collect/List.java | 94 --------- .../org/opensearch/common/collect/Map.java | 194 ------------------ .../org/opensearch/common/collect/Set.java | 95 --------- .../opensearch/core/internal/io/Streams.java | 101 --------- .../org/opensearch/monitor/jvm/JvmPid.java | 23 +-- .../org/opensearch/monitor/jvm/JvmPid.java | 41 ---- 10 files changed, 55 insertions(+), 674 deletions(-) delete mode 100644 libs/core/src/main/java11/org/opensearch/common/collect/List.java delete mode 100644 libs/core/src/main/java11/org/opensearch/common/collect/Map.java delete mode 100644 libs/core/src/main/java11/org/opensearch/common/collect/Set.java delete mode 100644 libs/core/src/main/java11/org/opensearch/core/internal/io/Streams.java delete mode 100644 server/src/main/java11/org/opensearch/monitor/jvm/JvmPid.java diff --git a/libs/core/src/main/java/org/opensearch/common/collect/List.java b/libs/core/src/main/java/org/opensearch/common/collect/List.java index 96bdacc276323..56216d6bbafe2 100644 --- a/libs/core/src/main/java/org/opensearch/common/collect/List.java +++ b/libs/core/src/main/java/org/opensearch/common/collect/List.java @@ -32,48 +32,44 @@ package org.opensearch.common.collect; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; public class List { /** - * Returns an unmodifiable list containing zero elements. + * Delegates to the Java9 {@code List.of()} method. * * @param the {@code List}'s element type * @return an empty {@code List} */ public static java.util.List of() { - return Collections.emptyList(); + return java.util.List.of(); } /** - * Returns an unmodifiable list containing one element. 
+ * Delegates to the Java9 {@code List.of()} method. * * @param the {@code List}'s element type * @param e1 the single element * @return a {@code List} containing the specified element */ public static java.util.List of(T e1) { - return Collections.singletonList(e1); + return java.util.List.of(e1); } /** - * Returns an unmodifiable list containing two elements. + * Delegates to the Java9 {@code List.of()} method. * * @param the {@code List}'s element type - * @param e1 the first element - * @param e2 the second element + * @param e1 the single element * @return a {@code List} containing the specified element */ - @SuppressWarnings("unchecked") public static java.util.List of(T e1, T e2) { - return List.of((T[]) new Object[] { e1, e2 }); + return java.util.List.of(e1, e2); } /** - * Returns an unmodifiable list containing an arbitrary number of elements. + * Delegates to the Java9 {@code List.of()} method. * * @param entries the elements to be contained in the list * @param the {@code List}'s element type @@ -82,25 +78,17 @@ public static java.util.List of(T e1, T e2) { @SafeVarargs @SuppressWarnings("varargs") public static java.util.List of(T... entries) { - switch (entries.length) { - case 0: - return List.of(); - case 1: - return List.of(entries[0]); - default: - return Collections.unmodifiableList(Arrays.asList(entries)); - } + return java.util.List.of(entries); } /** - * Returns an unmodifiable {@code List} containing the elements of the given {@code Collection} in iteration order. + * Delegates to the Java9 {@code List.copyOf()} method. * * @param the {@code List}'s element type * @param coll a {@code Collection} from which elements are drawn, must be non-null * @return a {@code List} containing the elements of the given {@code Collection} */ - @SuppressWarnings("unchecked") public static java.util.List copyOf(Collection coll) { - return (java.util.List) List.of(coll.toArray()); + return java.util.List.copyOf(coll); } } diff --git a/libs/core/src/main/java/org/opensearch/common/collect/Map.java b/libs/core/src/main/java/org/opensearch/common/collect/Map.java index 3b401ee0e1c1b..21de546869390 100644 --- a/libs/core/src/main/java/org/opensearch/common/collect/Map.java +++ b/libs/core/src/main/java/org/opensearch/common/collect/Map.java @@ -32,70 +32,66 @@ package org.opensearch.common.collect; -import java.util.AbstractMap; -import java.util.Collections; -import java.util.HashMap; - public class Map { /** - * Returns an unmodifiable map containing one mapping. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of() { - return Collections.emptyMap(); + return java.util.Map.of(); } /** - * Returns an unmodifiable map containing one mapping. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of(K k1, V v1) { - return Collections.singletonMap(k1, v1); + return java.util.Map.of(k1, v1); } /** - * Returns an unmodifiable map containing two mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of(K k1, V v1, K k2, V v2) { - return mapN(k1, v1, k2, v2); + return java.util.Map.of(k1, v1, k2, v2); } /** - * Returns an unmodifiable map containing three mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3) { - return mapN(k1, v1, k2, v2, k3, v3); + return java.util.Map.of(k1, v1, k2, v2, k3, v3); } /** - * Returns an unmodifiable map containing four mappings. + * Delegates to the Java9 {@code Map.of()} method. 
*/ public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) { - return mapN(k1, v1, k2, v2, k3, v3, k4, v4); + return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4); } /** - * Returns an unmodifiable map containing five mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) { - return mapN(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5); + return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5); } /** - * Returns an unmodifiable map containing six mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6) { - return mapN(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6); + return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6); } /** - * Returns an unmodifiable map containing seven mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7) { - return mapN(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7); + return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7); } /** - * Returns an unmodifiable map containing eight mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of( K k1, @@ -115,11 +111,11 @@ public static java.util.Map of( K k8, V v8 ) { - return mapN(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8); + return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8); } /** - * Returns an unmodifiable map containing nine mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of( K k1, @@ -141,11 +137,11 @@ public static java.util.Map of( K k9, V v9 ) { - return mapN(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9); + return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9); } /** - * Returns an unmodifiable map containing ten mappings. + * Delegates to the Java9 {@code Map.of()} method. */ public static java.util.Map of( K k1, @@ -169,68 +165,30 @@ public static java.util.Map of( K k10, V v10 ) { - return mapN(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9, k10, v10); - } - - @SuppressWarnings("unchecked") - private static java.util.Map mapN(Object... objects) { - if (objects.length % 2 != 0) { - throw new IllegalStateException("Must provide an even number of arguments to Map::of method"); - } - switch (objects.length) { - case 0: - return Map.of(); - case 2: - return Map.of((K) objects[0], (V) objects[1]); - default: - HashMap map = new HashMap<>(); - for (int k = 0; k < objects.length / 2; k++) { - map.put((K) objects[k * 2], (V) objects[k * 2 + 1]); - } - return Collections.unmodifiableMap(map); - } + return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9, k10, v10); } /** - * Returns an unmodifiable map containing keys and values extracted from the given entries. - * - * @param the {@code Map}'s key type - * @param the {@code Map}'s value type - * @param entries {@code Map.Entry}s containing the keys and values from which the map is populated - * @return a {@code Map} containing the specified mappings + * Delegates to the Java9 {@code Map.ofEntries()} method. 
*/ @SafeVarargs + @SuppressWarnings("varargs") public static java.util.Map ofEntries(java.util.Map.Entry... entries) { - if (entries.length == 0) { - return Collections.emptyMap(); - } else if (entries.length == 1) { - return Collections.singletonMap(entries[0].getKey(), entries[0].getValue()); - } else { - HashMap map = new HashMap<>(); - for (java.util.Map.Entry entry : entries) { - map.put(entry.getKey(), entry.getValue()); - } - return Collections.unmodifiableMap(map); - } + return java.util.Map.ofEntries(entries); } /** - * Returns an unmodifiable Map.Entry for the provided key and value. + * Delegates to the Java9 {@code Map.entry()} method. */ public static java.util.Map.Entry entry(K k, V v) { - return new AbstractMap.SimpleImmutableEntry<>(k, v); + return java.util.Map.entry(k, v); } /** - * Returns an unmodifiable {@code Map} containing the entries of the given {@code Map}. - * - * @param the {@code Map}'s key type - * @param the {@code Map}'s value type - * @param map a {@code Map} from which entries are drawn, must be non-null - * @return a {@code Map} containing the entries of the given {@code Map} + * Delegates to the Java10 {@code Map.copyOf()} method. */ - @SuppressWarnings({ "unchecked", "rawtypes" }) public static java.util.Map copyOf(java.util.Map map) { - return (java.util.Map) Map.ofEntries(map.entrySet().toArray(new java.util.Map.Entry[0])); + return java.util.Map.copyOf(map); } + } diff --git a/libs/core/src/main/java/org/opensearch/common/collect/Set.java b/libs/core/src/main/java/org/opensearch/common/collect/Set.java index 921408b88241f..0350023e4e894 100644 --- a/libs/core/src/main/java/org/opensearch/common/collect/Set.java +++ b/libs/core/src/main/java/org/opensearch/common/collect/Set.java @@ -32,49 +32,45 @@ package org.opensearch.common.collect; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; public class Set { /** - * Returns an unmodifiable set containing zero elements. + * Delegates to the Java9 {@code Set.of()} method. * * @param the {@code Set}'s element type * @return an empty {@code Set} */ public static java.util.Set of() { - return Collections.emptySet(); + return java.util.Set.of(); } /** - * Returns an unmodifiable set containing one element. + * Delegates to the Java9 {@code Set.of()} method. * * @param the {@code Set}'s element type * @param e1 the single element * @return a {@code Set} containing the specified element */ public static java.util.Set of(T e1) { - return Collections.singleton(e1); + return java.util.Set.of(e1); } /** - * Returns an unmodifiable set containing two elements. + * Delegates to the Java9 {@code Set.of()} method. * * @param the {@code Set}'s element type * @param e1 the first element * @param e2 the second element * @return a {@code Set} containing the specified element */ - @SuppressWarnings("unchecked") public static java.util.Set of(T e1, T e2) { - return Set.of((T[]) new Object[] { e1, e2 }); + return java.util.Set.of(e1, e2); } /** - * Returns an unmodifiable set containing an arbitrary number of elements. + * Delegates to the Java9 {@code Set.of()} method. * * @param entries the elements to be contained in the set * @param the {@code Set}'s element type @@ -83,27 +79,17 @@ public static java.util.Set of(T e1, T e2) { @SafeVarargs @SuppressWarnings("varargs") public static java.util.Set of(T... 
entries) { - switch (entries.length) { - case 0: - return Set.of(); - case 1: - return Set.of(entries[0]); - default: - return Collections.unmodifiableSet(new HashSet<>(Arrays.asList(entries))); - } + return java.util.Set.of(entries); } /** - * Returns an unmodifiable {@code Set} containing the elements of the given Collection. + * Delegates to the Java10 {@code Set.copyOf} method. * - * @param the {@code Set}'s element type + * @param the {@code Set}'s element type * @param coll a {@code Collection} from which elements are drawn, must be non-null * @return a {@code Set} containing the elements of the given {@code Collection} - * @throws NullPointerException if coll is null, or if it contains any nulls - * @since 10 */ - @SuppressWarnings("unchecked") public static java.util.Set copyOf(Collection coll) { - return (java.util.Set) Set.of(new HashSet<>(coll).toArray()); + return java.util.Set.copyOf(coll); } } diff --git a/libs/core/src/main/java/org/opensearch/core/internal/io/Streams.java b/libs/core/src/main/java/org/opensearch/core/internal/io/Streams.java index 1938e1bbf4dff..67765392b1d46 100644 --- a/libs/core/src/main/java/org/opensearch/core/internal/io/Streams.java +++ b/libs/core/src/main/java/org/opensearch/core/internal/io/Streams.java @@ -38,20 +38,15 @@ /** * Simple utility methods for file and stream copying. - * All copy methods use a block size of 4096 bytes, - * and close all affected streams when done. + * All copy methods close all affected streams when done. *
* Mainly for use within the framework, * but also useful for application code. */ -public class Streams { +public abstract class Streams { private static final ThreadLocal buffer = ThreadLocal.withInitial(() -> new byte[8 * 1024]); - private Streams() { - - } - /** * Copy the contents of the given InputStream to the given OutputStream. Optionally, closes both streams when done. * diff --git a/libs/core/src/main/java11/org/opensearch/common/collect/List.java b/libs/core/src/main/java11/org/opensearch/common/collect/List.java deleted file mode 100644 index 56216d6bbafe2..0000000000000 --- a/libs/core/src/main/java11/org/opensearch/common/collect/List.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.common.collect; - -import java.util.Collection; - -public class List { - - /** - * Delegates to the Java9 {@code List.of()} method. - * - * @param the {@code List}'s element type - * @return an empty {@code List} - */ - public static java.util.List of() { - return java.util.List.of(); - } - - /** - * Delegates to the Java9 {@code List.of()} method. - * - * @param the {@code List}'s element type - * @param e1 the single element - * @return a {@code List} containing the specified element - */ - public static java.util.List of(T e1) { - return java.util.List.of(e1); - } - - /** - * Delegates to the Java9 {@code List.of()} method. - * - * @param the {@code List}'s element type - * @param e1 the single element - * @return a {@code List} containing the specified element - */ - public static java.util.List of(T e1, T e2) { - return java.util.List.of(e1, e2); - } - - /** - * Delegates to the Java9 {@code List.of()} method. - * - * @param entries the elements to be contained in the list - * @param the {@code List}'s element type - * @return an unmodifiable list containing the specified elements. - */ - @SafeVarargs - @SuppressWarnings("varargs") - public static java.util.List of(T... entries) { - return java.util.List.of(entries); - } - - /** - * Delegates to the Java9 {@code List.copyOf()} method. 
- * - * @param the {@code List}'s element type - * @param coll a {@code Collection} from which elements are drawn, must be non-null - * @return a {@code List} containing the elements of the given {@code Collection} - */ - public static java.util.List copyOf(Collection coll) { - return java.util.List.copyOf(coll); - } -} diff --git a/libs/core/src/main/java11/org/opensearch/common/collect/Map.java b/libs/core/src/main/java11/org/opensearch/common/collect/Map.java deleted file mode 100644 index 21de546869390..0000000000000 --- a/libs/core/src/main/java11/org/opensearch/common/collect/Map.java +++ /dev/null @@ -1,194 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.common.collect; - -public class Map { - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of() { - return java.util.Map.of(); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of(K k1, V v1) { - return java.util.Map.of(k1, v1); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of(K k1, V v1, K k2, V v2) { - return java.util.Map.of(k1, v1, k2, v2); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of(K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5, K k6, V v6, K k7, V v7) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. 
- */ - public static java.util.Map of( - K k1, - V v1, - K k2, - V v2, - K k3, - V v3, - K k4, - V v4, - K k5, - V v5, - K k6, - V v6, - K k7, - V v7, - K k8, - V v8 - ) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of( - K k1, - V v1, - K k2, - V v2, - K k3, - V v3, - K k4, - V v4, - K k5, - V v5, - K k6, - V v6, - K k7, - V v7, - K k8, - V v8, - K k9, - V v9 - ) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9); - } - - /** - * Delegates to the Java9 {@code Map.of()} method. - */ - public static java.util.Map of( - K k1, - V v1, - K k2, - V v2, - K k3, - V v3, - K k4, - V v4, - K k5, - V v5, - K k6, - V v6, - K k7, - V v7, - K k8, - V v8, - K k9, - V v9, - K k10, - V v10 - ) { - return java.util.Map.of(k1, v1, k2, v2, k3, v3, k4, v4, k5, v5, k6, v6, k7, v7, k8, v8, k9, v9, k10, v10); - } - - /** - * Delegates to the Java9 {@code Map.ofEntries()} method. - */ - @SafeVarargs - @SuppressWarnings("varargs") - public static java.util.Map ofEntries(java.util.Map.Entry... entries) { - return java.util.Map.ofEntries(entries); - } - - /** - * Delegates to the Java9 {@code Map.entry()} method. - */ - public static java.util.Map.Entry entry(K k, V v) { - return java.util.Map.entry(k, v); - } - - /** - * Delegates to the Java10 {@code Map.copyOf()} method. - */ - public static java.util.Map copyOf(java.util.Map map) { - return java.util.Map.copyOf(map); - } - -} diff --git a/libs/core/src/main/java11/org/opensearch/common/collect/Set.java b/libs/core/src/main/java11/org/opensearch/common/collect/Set.java deleted file mode 100644 index 0350023e4e894..0000000000000 --- a/libs/core/src/main/java11/org/opensearch/common/collect/Set.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.common.collect; - -import java.util.Collection; - -public class Set { - - /** - * Delegates to the Java9 {@code Set.of()} method. - * - * @param the {@code Set}'s element type - * @return an empty {@code Set} - */ - public static java.util.Set of() { - return java.util.Set.of(); - } - - /** - * Delegates to the Java9 {@code Set.of()} method. 
- * - * @param the {@code Set}'s element type - * @param e1 the single element - * @return a {@code Set} containing the specified element - */ - public static java.util.Set of(T e1) { - return java.util.Set.of(e1); - } - - /** - * Delegates to the Java9 {@code Set.of()} method. - * - * @param the {@code Set}'s element type - * @param e1 the first element - * @param e2 the second element - * @return a {@code Set} containing the specified element - */ - public static java.util.Set of(T e1, T e2) { - return java.util.Set.of(e1, e2); - } - - /** - * Delegates to the Java9 {@code Set.of()} method. - * - * @param entries the elements to be contained in the set - * @param the {@code Set}'s element type - * @return an unmodifiable set containing the specified elements. - */ - @SafeVarargs - @SuppressWarnings("varargs") - public static java.util.Set of(T... entries) { - return java.util.Set.of(entries); - } - - /** - * Delegates to the Java10 {@code Set.copyOf} method. - * - * @param the {@code Set}'s element type - * @param coll a {@code Collection} from which elements are drawn, must be non-null - * @return a {@code Set} containing the elements of the given {@code Collection} - */ - public static java.util.Set copyOf(Collection coll) { - return java.util.Set.copyOf(coll); - } -} diff --git a/libs/core/src/main/java11/org/opensearch/core/internal/io/Streams.java b/libs/core/src/main/java11/org/opensearch/core/internal/io/Streams.java deleted file mode 100644 index 67765392b1d46..0000000000000 --- a/libs/core/src/main/java11/org/opensearch/core/internal/io/Streams.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.core.internal.io; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; - -/** - * Simple utility methods for file and stream copying. - * All copy methods close all affected streams when done. - *
- * Mainly for use within the framework, - * but also useful for application code. - */ -public abstract class Streams { - - private static final ThreadLocal buffer = ThreadLocal.withInitial(() -> new byte[8 * 1024]); - - /** - * Copy the contents of the given InputStream to the given OutputStream. Optionally, closes both streams when done. - * - * @param in the stream to copy from - * @param out the stream to copy to - * @param close whether to close both streams after copying - * @param buffer buffer to use for copying - * @return the number of bytes copied - * @throws IOException in case of I/O errors - */ - public static long copy(final InputStream in, final OutputStream out, byte[] buffer, boolean close) throws IOException { - Exception err = null; - try { - long byteCount = 0; - int bytesRead; - while ((bytesRead = in.read(buffer)) != -1) { - out.write(buffer, 0, bytesRead); - byteCount += bytesRead; - } - out.flush(); - return byteCount; - } catch (IOException | RuntimeException e) { - err = e; - throw e; - } finally { - if (close) { - IOUtils.close(err, in, out); - } - } - } - - /** - * @see #copy(InputStream, OutputStream, byte[], boolean) - */ - public static long copy(final InputStream in, final OutputStream out, boolean close) throws IOException { - return copy(in, out, buffer.get(), close); - } - - /** - * @see #copy(InputStream, OutputStream, byte[], boolean) - */ - public static long copy(final InputStream in, final OutputStream out, byte[] buffer) throws IOException { - return copy(in, out, buffer, true); - } - - /** - * @see #copy(InputStream, OutputStream, byte[], boolean) - */ - public static long copy(final InputStream in, final OutputStream out) throws IOException { - return copy(in, out, buffer.get(), true); - } -} diff --git a/server/src/main/java/org/opensearch/monitor/jvm/JvmPid.java b/server/src/main/java/org/opensearch/monitor/jvm/JvmPid.java index bf9b5f6fe4f3f..9a2fbfbd27c68 100644 --- a/server/src/main/java/org/opensearch/monitor/jvm/JvmPid.java +++ b/server/src/main/java/org/opensearch/monitor/jvm/JvmPid.java @@ -32,31 +32,10 @@ package org.opensearch.monitor.jvm; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.message.ParameterizedMessage; - -import java.lang.management.ManagementFactory; - class JvmPid { - private static final long PID; - static long getPid() { - return PID; - } - - static { - PID = initializePid(); - } - - private static long initializePid() { - final String name = ManagementFactory.getRuntimeMXBean().getName(); - try { - return Long.parseLong(name.split("@")[0]); - } catch (final NumberFormatException e) { - LogManager.getLogger(JvmPid.class).debug(new ParameterizedMessage("failed parsing PID from [{}]", name), e); - return -1; - } + return ProcessHandle.current().pid(); } } diff --git a/server/src/main/java11/org/opensearch/monitor/jvm/JvmPid.java b/server/src/main/java11/org/opensearch/monitor/jvm/JvmPid.java deleted file mode 100644 index 9a2fbfbd27c68..0000000000000 --- a/server/src/main/java11/org/opensearch/monitor/jvm/JvmPid.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.monitor.jvm; - -class JvmPid { - - static long getPid() { - return ProcessHandle.current().pid(); - } - -} From a36a1e395c2f535b4c78be1a14241b2e05a4a52c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Apr 2022 11:17:21 -0700 Subject: [PATCH 15/41] Bump jna from 5.10.0 to 5.11.0 in /buildSrc (#2946) Bumps [jna](https://github.com/java-native-access/jna) from 5.10.0 to 5.11.0. - [Release notes](https://github.com/java-native-access/jna/releases) - [Changelog](https://github.com/java-native-access/jna/blob/master/CHANGES.md) - [Commits](https://github.com/java-native-access/jna/compare/5.10.0...5.11.0) --- updated-dependencies: - dependency-name: net.java.dev.jna:jna dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- buildSrc/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index d478a1fd45e80..077064e33187c 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -110,7 +110,7 @@ dependencies { api 'com.netflix.nebula:gradle-info-plugin:11.3.3' api 'org.apache.rat:apache-rat:0.13' api 'commons-io:commons-io:2.7' - api "net.java.dev.jna:jna:5.10.0" + api "net.java.dev.jna:jna:5.11.0" api 'gradle.plugin.com.github.johnrengelman:shadow:7.1.2' api 'de.thetaphi:forbiddenapis:3.3' api 'com.avast.gradle:gradle-docker-compose-plugin:0.15.2' From ab026025d711ab6f4e807c0ad40cae3d9ee055ac Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Apr 2022 11:18:02 -0700 Subject: [PATCH 16/41] Bump reactor-core from 3.4.15 to 3.4.17 in /plugins/repository-azure (#2947) * Bump reactor-core from 3.4.15 to 3.4.17 in /plugins/repository-azure Bumps [reactor-core](https://github.com/reactor/reactor-core) from 3.4.15 to 3.4.17. - [Release notes](https://github.com/reactor/reactor-core/releases) - [Commits](https://github.com/reactor/reactor-core/compare/v3.4.15...v3.4.17) --- updated-dependencies: - dependency-name: io.projectreactor:reactor-core dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-azure/build.gradle | 2 +- plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 | 1 - plugins/repository-azure/licenses/reactor-core-3.4.17.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 create mode 100644 plugins/repository-azure/licenses/reactor-core-3.4.17.jar.sha1 diff --git a/plugins/repository-azure/build.gradle b/plugins/repository-azure/build.gradle index da644d77eb488..a18f18cea185e 100644 --- a/plugins/repository-azure/build.gradle +++ b/plugins/repository-azure/build.gradle @@ -56,7 +56,7 @@ dependencies { implementation project(':modules:transport-netty4') api 'com.azure:azure-storage-blob:12.15.0' api 'org.reactivestreams:reactive-streams:1.0.3' - api 'io.projectreactor:reactor-core:3.4.15' + api 'io.projectreactor:reactor-core:3.4.17' api 'io.projectreactor.netty:reactor-netty:1.0.17' api 'io.projectreactor.netty:reactor-netty-core:1.0.16' api 'io.projectreactor.netty:reactor-netty-http:1.0.16' diff --git a/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 b/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 deleted file mode 100644 index a89de48b20b51..0000000000000 --- a/plugins/repository-azure/licenses/reactor-core-3.4.15.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -28ccf513fe64709c8ded30ea3f387fc718db9626 \ No newline at end of file diff --git a/plugins/repository-azure/licenses/reactor-core-3.4.17.jar.sha1 b/plugins/repository-azure/licenses/reactor-core-3.4.17.jar.sha1 new file mode 100644 index 0000000000000..3803458775631 --- /dev/null +++ b/plugins/repository-azure/licenses/reactor-core-3.4.17.jar.sha1 @@ -0,0 +1 @@ +52176b50d2191bc32a8a235124e7aff7f291754b \ No newline at end of file From 002e614ecf9357a2636aaf6a4db98cec8af33129 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Apr 2022 11:27:19 -0700 Subject: [PATCH 17/41] Bump hadoop-minicluster from 3.3.1 to 3.3.2 in /test/fixtures/hdfs-fixture (#2381) * Bump hadoop-minicluster in /test/fixtures/hdfs-fixture Bumps hadoop-minicluster from 3.3.1 to 3.3.2. --- updated-dependencies: - dependency-name: org.apache.hadoop:hadoop-minicluster dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Fixing gradle check Signed-off-by: Vacha Shah Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Vacha Shah --- test/fixtures/hdfs-fixture/build.gradle | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/fixtures/hdfs-fixture/build.gradle b/test/fixtures/hdfs-fixture/build.gradle index c56cc6d196b63..2ff444c03b123 100644 --- a/test/fixtures/hdfs-fixture/build.gradle +++ b/test/fixtures/hdfs-fixture/build.gradle @@ -33,7 +33,7 @@ apply plugin: 'opensearch.java' group = 'hdfs' dependencies { - api "org.apache.hadoop:hadoop-minicluster:3.3.1" + api "org.apache.hadoop:hadoop-minicluster:3.3.2" api "org.apache.commons:commons-compress:1.21" api "commons-codec:commons-codec:${versions.commonscodec}" api "org.apache.logging.log4j:log4j-core:${versions.log4j}" @@ -43,4 +43,5 @@ dependencies { api "com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:${versions.jackson}" api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" api 'net.minidev:json-smart:2.4.8' + api "org.mockito:mockito-core:${versions.mockito}" } From bb19f627f090b04cd60213e4b00ee38b31f6de63 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Apr 2022 11:30:36 -0700 Subject: [PATCH 18/41] Bump guava from 30.1.1-jre to 31.1-jre in /plugins/repository-hdfs (#2948) * Bump guava from 30.1.1-jre to 31.1-jre in /plugins/repository-hdfs Bumps [guava](https://github.com/google/guava) from 30.1.1-jre to 31.1-jre. - [Release notes](https://github.com/google/guava/releases) - [Commits](https://github.com/google/guava/commits) --- updated-dependencies: - dependency-name: com.google.guava:guava dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-hdfs/build.gradle | 2 +- plugins/repository-hdfs/licenses/guava-30.1.1-jre.jar.sha1 | 1 - plugins/repository-hdfs/licenses/guava-31.1-jre.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-hdfs/licenses/guava-30.1.1-jre.jar.sha1 create mode 100644 plugins/repository-hdfs/licenses/guava-31.1-jre.jar.sha1 diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index d8811ded8d092..02ac822f94995 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -66,7 +66,7 @@ dependencies { api 'org.apache.avro:avro:1.10.2' api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" api 'com.google.code.gson:gson:2.9.0' - runtimeOnly 'com.google.guava:guava:30.1.1-jre' + runtimeOnly 'com.google.guava:guava:31.1-jre' api 'com.google.protobuf:protobuf-java:3.20.0' api "commons-logging:commons-logging:${versions.commonslogging}" api 'commons-cli:commons-cli:1.2' diff --git a/plugins/repository-hdfs/licenses/guava-30.1.1-jre.jar.sha1 b/plugins/repository-hdfs/licenses/guava-30.1.1-jre.jar.sha1 deleted file mode 100644 index 39e641fc7834f..0000000000000 --- a/plugins/repository-hdfs/licenses/guava-30.1.1-jre.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -87e0fd1df874ea3cbe577702fe6f17068b790fd8 \ No newline at end of file diff --git a/plugins/repository-hdfs/licenses/guava-31.1-jre.jar.sha1 b/plugins/repository-hdfs/licenses/guava-31.1-jre.jar.sha1 new file mode 100644 index 0000000000000..e57390ebe1299 --- /dev/null +++ b/plugins/repository-hdfs/licenses/guava-31.1-jre.jar.sha1 @@ -0,0 +1 @@ +60458f877d055d0c9114d9e1a2efb737b4bc282c \ No newline at end of file From ca102ea209d142b73f5c037ff0624ab704e00c04 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Apr 2022 12:25:14 -0700 Subject: [PATCH 19/41] Bump grpc-context from 1.29.0 to 1.45.1 in /plugins/repository-gcs (#2944) * Bump grpc-context from 1.29.0 to 1.45.1 in /plugins/repository-gcs Bumps [grpc-context](https://github.com/grpc/grpc-java) from 1.29.0 to 1.45.1. - [Release notes](https://github.com/grpc/grpc-java/releases) - [Commits](https://github.com/grpc/grpc-java/compare/v1.29.0...v1.45.1) --- updated-dependencies: - dependency-name: io.grpc:grpc-context dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] --- plugins/repository-gcs/build.gradle | 2 +- plugins/repository-gcs/licenses/grpc-context-1.29.0.jar.sha1 | 1 - plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) delete mode 100644 plugins/repository-gcs/licenses/grpc-context-1.29.0.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 0e1ed06879f91..241cd70eba071 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -75,7 +75,7 @@ dependencies { api 'com.google.http-client:google-http-client-jackson2:1.35.0' api 'com.google.http-client:google-http-client-gson:1.41.4' api 'com.google.api:gax-httpjson:0.62.0' - api 'io.grpc:grpc-context:1.29.0' + api 'io.grpc:grpc-context:1.45.1' api 'io.opencensus:opencensus-api:0.18.0' api 'io.opencensus:opencensus-contrib-http-util:0.18.0' api 'com.google.apis:google-api-services-storage:v1-rev20200814-1.30.10' diff --git a/plugins/repository-gcs/licenses/grpc-context-1.29.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.29.0.jar.sha1 deleted file mode 100644 index a549827edd283..0000000000000 --- a/plugins/repository-gcs/licenses/grpc-context-1.29.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1d8a441110f86f8927543dc3007639080441ea3c \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 new file mode 100644 index 0000000000000..eb7e4fcd78e97 --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-context-1.45.1.jar.sha1 @@ -0,0 +1 @@ +485a08c019cc78914a477b1dfc7052820b8d822c \ No newline at end of file From 25d81c5fd5f680da2ca3db1cbbda66b68078c2fb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Apr 2022 15:22:12 -0700 Subject: [PATCH 20/41] Bump google-oauth-client from 1.33.2 to 1.33.3 in /plugins/discovery-gce (#2943) * Bump google-oauth-client from 1.33.2 to 1.33.3 in /plugins/discovery-gce Bumps [google-oauth-client](https://github.com/googleapis/google-oauth-java-client) from 1.33.2 to 1.33.3. - [Release notes](https://github.com/googleapis/google-oauth-java-client/releases) - [Changelog](https://github.com/googleapis/google-oauth-java-client/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/google-oauth-java-client/compare/v1.33.2...v1.33.3) --- updated-dependencies: - dependency-name: com.google.oauth-client:google-oauth-client dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Updating SHAs Signed-off-by: dependabot[bot] * Fixing precommit failures Signed-off-by: Vacha Shah Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: dependabot[bot] Co-authored-by: Vacha Shah --- plugins/discovery-gce/build.gradle | 14 +++++++++++--- .../licenses/google-oauth-client-1.33.2.jar.sha1 | 1 - .../licenses/google-oauth-client-1.33.3.jar.sha1 | 1 + 3 files changed, 12 insertions(+), 4 deletions(-) delete mode 100644 plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-oauth-client-1.33.3.jar.sha1 diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index eb695f84b2bd0..beae0d84685a4 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -24,7 +24,7 @@ versions << [ dependencies { api "com.google.apis:google-api-services-compute:v1-rev160-${versions.google}" api "com.google.api-client:google-api-client:${versions.google}" - api "com.google.oauth-client:google-oauth-client:1.33.2" + api "com.google.oauth-client:google-oauth-client:1.33.3" api "com.google.http-client:google-http-client:${versions.google}" api "com.google.http-client:google-http-client-jackson2:${versions.google}" api 'com.google.code.findbugs:jsr305:3.0.2' @@ -58,13 +58,21 @@ test { thirdPartyAudit.ignoreMissingClasses( // classes are missing 'javax.jms.Message', - 'com.google.common.base.Splitter', - 'com.google.common.collect.Lists', 'javax.servlet.ServletContextEvent', 'javax.servlet.ServletContextListener', 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', 'org.apache.log.Logger', + 'com.google.api.client.json.gson.GsonFactory', + 'com.google.common.base.Preconditions', + 'com.google.common.base.Splitter', + 'com.google.common.cache.CacheBuilder', + 'com.google.common.cache.CacheLoader', + 'com.google.common.cache.LoadingCache', + 'com.google.common.collect.ImmutableMap', + 'com.google.common.collect.ImmutableMap$Builder', + 'com.google.common.collect.ImmutableSet', + 'com.google.common.collect.Lists', 'com.google.common.collect.Multiset', 'com.google.common.collect.SortedMultiset', 'com.google.common.collect.TreeMultiset', diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 deleted file mode 100644 index 289e8e8261fd3..0000000000000 --- a/plugins/discovery-gce/licenses/google-oauth-client-1.33.2.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2810fb515fe110295dc6867fc9f70c401b66daf3 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-oauth-client-1.33.3.jar.sha1 b/plugins/discovery-gce/licenses/google-oauth-client-1.33.3.jar.sha1 new file mode 100644 index 0000000000000..f2afaa1bc2dba --- /dev/null +++ b/plugins/discovery-gce/licenses/google-oauth-client-1.33.3.jar.sha1 @@ -0,0 +1 @@ +9d445a8649b0de731922b9a3ebf1552b5403611d \ No newline at end of file From b2f2658f8d342340ce6047ccd3027aba04a88f07 Mon Sep 17 00:00:00 2001 From: Kunal Kotwani Date: Mon, 18 Apr 2022 15:50:21 -0700 Subject: [PATCH 21/41] Override toString for usable logs (#2895) Signed-off-by: Kunal Kotwani --- .../opensearch/client/cluster/ProxyModeInfo.java | 16 ++++++++++++++++ .../client/cluster/RemoteConnectionInfo.java | 16 ++++++++++++++++ .../opensearch/client/cluster/SniffModeInfo.java | 12 ++++++++++++ 3 files changed, 44 insertions(+) diff --git 
a/client/rest-high-level/src/main/java/org/opensearch/client/cluster/ProxyModeInfo.java b/client/rest-high-level/src/main/java/org/opensearch/client/cluster/ProxyModeInfo.java index fe5b767a7f68f..1b6838d455182 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/cluster/ProxyModeInfo.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/cluster/ProxyModeInfo.java @@ -94,4 +94,20 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(address, serverName, maxSocketConnections, numSocketsConnected); } + + @Override + public String toString() { + return "ProxyModeInfo{" + + "address='" + + address + + '\'' + + ", serverName='" + + serverName + + '\'' + + ", maxSocketConnections=" + + maxSocketConnections + + ", numSocketsConnected=" + + numSocketsConnected + + '}'; + } } diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/cluster/RemoteConnectionInfo.java b/client/rest-high-level/src/main/java/org/opensearch/client/cluster/RemoteConnectionInfo.java index 4f91d32452d26..ee51a5e58f26e 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/cluster/RemoteConnectionInfo.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/cluster/RemoteConnectionInfo.java @@ -142,6 +142,22 @@ public int hashCode() { return Objects.hash(modeInfo, initialConnectionTimeoutString, clusterAlias, skipUnavailable); } + @Override + public String toString() { + return "RemoteConnectionInfo{" + + "modeInfo=" + + modeInfo + + ", initialConnectionTimeoutString='" + + initialConnectionTimeoutString + + '\'' + + ", clusterAlias='" + + clusterAlias + + '\'' + + ", skipUnavailable=" + + skipUnavailable + + '}'; + } + public interface ModeInfo { boolean isConnected(); diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/cluster/SniffModeInfo.java b/client/rest-high-level/src/main/java/org/opensearch/client/cluster/SniffModeInfo.java index 63c53ab4f69d5..96eddb72189b9 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/cluster/SniffModeInfo.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/cluster/SniffModeInfo.java @@ -86,4 +86,16 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(seedNodes, maxConnectionsPerCluster, numNodesConnected); } + + @Override + public String toString() { + return "SniffModeInfo{" + + "seedNodes=" + + seedNodes + + ", maxConnectionsPerCluster=" + + maxConnectionsPerCluster + + ", numNodesConnected=" + + numNodesConnected + + '}'; + } } From 8033183c469fbc2a97ee32095b3dcd46f3dfc983 Mon Sep 17 00:00:00 2001 From: Vacha Shah Date: Mon, 18 Apr 2022 16:14:35 -0700 Subject: [PATCH 22/41] Adding reta to codeowners (#2967) Signed-off-by: Vacha Shah --- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 5dfa9099866d1..8b63b291a8a54 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1,3 @@ # This should match the owning team set up in https://github.com/orgs/opensearch-project/teams -* @opensearch-project/opensearch-core +* @opensearch-project/opensearch-core @reta From 8bfb082ee136aa65063d17ec1e41fb10ed759d1f Mon Sep 17 00:00:00 2001 From: Matt Weber Date: Mon, 18 Apr 2022 21:34:54 -0700 Subject: [PATCH 23/41] Use G1GC on JDK11+ (#2964) Update default jvm settings to use G1GC by default for JDK11 and greater. 
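For anyone validating the rollout, the collector a node's JVM actually selected can be checked through the JMX garbage collector beans. Below is a minimal illustrative sketch (the class name and its placement are hypothetical; it is not part of this patch). On a JDK 11+ node started with these defaults it should list the G1 collectors:

import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;

// Minimal sketch: prints the collectors active in the running JVM.
// With -XX:+UseG1GC in effect this prints "G1 Young Generation" and
// "G1 Old Generation"; with CMS it prints "ParNew" and "ConcurrentMarkSweep".
public class PrintActiveGc {
    public static void main(String[] args) {
        for (GarbageCollectorMXBean gc : ManagementFactory.getGarbageCollectorMXBeans()) {
            System.out.println(gc.getName());
        }
    }
}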
Signed-off-by: Matt Weber --- distribution/src/config/jvm.options | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index a20baf1be0906..ef1035489c9fc 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -33,19 +33,19 @@ ################################################################ ## GC configuration -8-13:-XX:+UseConcMarkSweepGC -8-13:-XX:CMSInitiatingOccupancyFraction=75 -8-13:-XX:+UseCMSInitiatingOccupancyOnly +8-10:-XX:+UseConcMarkSweepGC +8-10:-XX:CMSInitiatingOccupancyFraction=75 +8-10:-XX:+UseCMSInitiatingOccupancyOnly ## G1GC Configuration # NOTE: G1 GC is only supported on JDK version 10 or later # to use G1GC, uncomment the next two lines and update the version on the # following three lines to your version of the JDK -# 10-13:-XX:-UseConcMarkSweepGC -# 10-13:-XX:-UseCMSInitiatingOccupancyOnly -14-:-XX:+UseG1GC -14-:-XX:G1ReservePercent=25 -14-:-XX:InitiatingHeapOccupancyPercent=30 +# 10:-XX:-UseConcMarkSweepGC +# 10:-XX:-UseCMSInitiatingOccupancyOnly +11-:-XX:+UseG1GC +11-:-XX:G1ReservePercent=25 +11-:-XX:InitiatingHeapOccupancyPercent=30 ## JVM temporary directory -Djava.io.tmpdir=${OPENSEARCH_TMPDIR} From d8c815c6be80d592cf22f4a1c5060c2ca1857c2a Mon Sep 17 00:00:00 2001 From: Yevhen Tienkaiev Date: Tue, 19 Apr 2022 07:38:54 +0300 Subject: [PATCH 24/41] Add `positive_score_impact` support for `rank_features` (#2725) Adds positive_score_impact support for rank_features field mapper. Signed-off-by: Yevhen Tienkaiev --- .../index/mapper/RankFeaturesFieldMapper.java | 41 ++++++++++++++++--- .../mapper/RankFeaturesFieldMapperTests.java | 31 +++++++++++++- .../mapper/RankFeaturesFieldTypeTests.java | 2 +- .../test/rank_feature/10_basic.yml | 21 ++++++++++ .../test/rank_features/10_basic.yml | 40 ++++++++++++++++++ 5 files changed, 126 insertions(+), 9 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java index 43853eb40f432..21a0acd508a39 100644 --- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/RankFeaturesFieldMapper.java @@ -42,7 +42,6 @@ import org.opensearch.search.lookup.SearchLookup; import java.io.IOException; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.Supplier; @@ -55,8 +54,18 @@ public class RankFeaturesFieldMapper extends ParametrizedFieldMapper { public static final String CONTENT_TYPE = "rank_features"; + private static RankFeaturesFieldType ft(FieldMapper in) { + return ((RankFeaturesFieldMapper) in).fieldType(); + } + public static class Builder extends ParametrizedFieldMapper.Builder { + private final Parameter positiveScoreImpact = Parameter.boolParam( + "positive_score_impact", + false, + m -> ft(m).positiveScoreImpact, + true + ); private final Parameter> meta = Parameter.metaParam(); public Builder(String name) { @@ -66,16 +75,17 @@ public Builder(String name) { @Override protected List> getParameters() { - return Collections.singletonList(meta); + return List.of(meta, positiveScoreImpact); } @Override public RankFeaturesFieldMapper build(BuilderContext context) { return new RankFeaturesFieldMapper( name, - new RankFeaturesFieldType(buildFullName(context), meta.getValue()), + new 
RankFeaturesFieldType(buildFullName(context), meta.getValue(), positiveScoreImpact.getValue()), multiFieldsBuilder.build(this, context), - copyTo.build() + copyTo.build(), + positiveScoreImpact.getValue() ); } } @@ -84,9 +94,12 @@ public RankFeaturesFieldMapper build(BuilderContext context) { public static final class RankFeaturesFieldType extends MappedFieldType { - public RankFeaturesFieldType(String name, Map meta) { + private final boolean positiveScoreImpact; + + public RankFeaturesFieldType(String name, Map meta, boolean positiveScoreImpact) { super(name, false, false, false, TextSearchInfo.NONE, meta); setIndexAnalyzer(Lucene.KEYWORD_ANALYZER); + this.positiveScoreImpact = positiveScoreImpact; } @Override @@ -94,6 +107,10 @@ public String typeName() { return CONTENT_TYPE; } + public boolean positiveScoreImpact() { + return positiveScoreImpact; + } + @Override public Query existsQuery(QueryShardContext context) { throw new IllegalArgumentException("[rank_features] fields do not support [exists] queries"); @@ -115,9 +132,18 @@ public Query termQuery(Object value, QueryShardContext context) { } } - private RankFeaturesFieldMapper(String simpleName, MappedFieldType mappedFieldType, MultiFields multiFields, CopyTo copyTo) { + private final boolean positiveScoreImpact; + + private RankFeaturesFieldMapper( + String simpleName, + MappedFieldType mappedFieldType, + MultiFields multiFields, + CopyTo copyTo, + boolean positiveScoreImpact + ) { super(simpleName, mappedFieldType, multiFields, copyTo); assert fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) <= 0; + this.positiveScoreImpact = positiveScoreImpact; } @Override @@ -164,6 +190,9 @@ public void parse(ParseContext context) throws IOException { + "] in the same document" ); } + if (positiveScoreImpact == false) { + value = 1 / value; + } context.doc().addWithKey(key, new FeatureField(name(), feature, value)); } else { throw new IllegalArgumentException( diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java index 129ba6b126237..55d825d1b53bb 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldMapperTests.java @@ -67,8 +67,8 @@ protected void minimalMapping(XContentBuilder b) throws IOException { } @Override - protected void registerParameters(ParameterChecker checker) { - // no parameters to configure + protected void registerParameters(ParameterChecker checker) throws IOException { + checker.registerConflictCheck("positive_score_impact", b -> b.field("positive_score_impact", false)); } @Override @@ -95,6 +95,33 @@ public void testDefaults() throws Exception { assertTrue(freq1 < freq2); } + public void testNegativeScoreImpact() throws Exception { + DocumentMapper mapper = createDocumentMapper( + fieldMapping(b -> b.field("type", "rank_features").field("positive_score_impact", false)) + ); + + ParsedDocument doc1 = mapper.parse(source(this::writeField)); + + IndexableField[] fields = doc1.rootDoc().getFields("field"); + assertEquals(2, fields.length); + assertThat(fields[0], Matchers.instanceOf(FeatureField.class)); + FeatureField featureField1 = null; + FeatureField featureField2 = null; + for (IndexableField field : fields) { + if (field.stringValue().equals("foo")) { + featureField1 = (FeatureField) field; + } else if 
(field.stringValue().equals("bar")) { + featureField2 = (FeatureField) field; + } else { + throw new UnsupportedOperationException(); + } + } + + int freq1 = RankFeatureFieldMapperTests.getFrequency(featureField1.tokenStream(null, null)); + int freq2 = RankFeatureFieldMapperTests.getFrequency(featureField2.tokenStream(null, null)); + assertTrue(freq1 > freq2); + } + public void testRejectMultiValuedFields() throws MapperParsingException, IOException { DocumentMapper mapper = createDocumentMapper(mapping(b -> { b.startObject("field").field("type", "rank_features").endObject(); diff --git a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldTypeTests.java b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldTypeTests.java index b8c653bc97ce7..8ece0d63f05ba 100644 --- a/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldTypeTests.java +++ b/modules/mapper-extras/src/test/java/org/opensearch/index/mapper/RankFeaturesFieldTypeTests.java @@ -37,7 +37,7 @@ public class RankFeaturesFieldTypeTests extends FieldTypeTestCase { public void testIsNotAggregatable() { - MappedFieldType fieldType = new RankFeaturesFieldMapper.RankFeaturesFieldType("field", Collections.emptyMap()); + MappedFieldType fieldType = new RankFeaturesFieldMapper.RankFeaturesFieldType("field", Collections.emptyMap(), true); assertFalse(fieldType.isAggregatable()); } } diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml index 6fea35eb21f4e..ac951263ca299 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml @@ -157,3 +157,24 @@ setup: - match: hits.hits.1._id: "1" + +--- +"Negative linear": + + - do: + search: + index: test + body: + query: + rank_feature: + field: url_length + linear: {} + + - match: + hits.total.value: 2 + + - match: + hits.hits.0._id: "2" + + - match: + hits.hits.1._id: "1" diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml index d4d5d2a360406..2644b9e777f6a 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml @@ -9,6 +9,9 @@ setup: properties: tags: type: rank_features + negative_reviews: + type: rank_features + positive_score_impact: false - do: index: @@ -18,6 +21,9 @@ setup: tags: foo: 3 bar: 5 + negative_reviews: + 1star: 10 + 2star: 1 - do: index: @@ -27,6 +33,9 @@ setup: tags: bar: 6 quux: 10 + negative_reviews: + 1star: 1 + 2star: 10 - do: indices.refresh: {} @@ -97,3 +106,34 @@ setup: - match: hits.hits.1._id: "1" + +--- +"Linear negative impact": + + - do: + search: + index: test + body: + query: + rank_feature: + field: negative_reviews.1star + linear: {} + + - match: + hits.hits.0._id: "2" + - match: + hits.hits.1._id: "1" + + - do: + search: + index: test + body: + query: + rank_feature: + field: negative_reviews.2star + linear: {} + + - match: + hits.hits.0._id: "1" + - match: + hits.hits.1._id: "2" From 8c9078d134d6d4e9ec0d14d41c3565fc45ae7e33 Mon Sep 17 00:00:00 2001 From: Vacha Shah Date: Tue, 19 Apr 2022 
09:19:59 -0700
Subject: [PATCH 25/41] Adding workflow to create documentation-related issues in documentation-website repo (#2929)

Signed-off-by: Vacha Shah
---
 .ci/documentation/issue.md | 11 +++++
 .../workflows/create-documentation-issue.yml | 41 +++++++++++++++++++
 2 files changed, 52 insertions(+)
 create mode 100644 .ci/documentation/issue.md
 create mode 100644 .github/workflows/create-documentation-issue.yml

diff --git a/.ci/documentation/issue.md b/.ci/documentation/issue.md
new file mode 100644
index 0000000000000..c34905605b2f6
--- /dev/null
+++ b/.ci/documentation/issue.md
@@ -0,0 +1,11 @@
+**Is your feature request related to a problem?**
+A new feature has been added.
+
+**What solution would you like?**
+Document the usage of the new feature.
+
+**What alternatives have you considered?**
+N/A
+
+**Do you have any additional context?**
+Please see

diff --git a/.github/workflows/create-documentation-issue.yml b/.github/workflows/create-documentation-issue.yml
new file mode 100644
index 0000000000000..c81f7355a0d22
--- /dev/null
+++ b/.github/workflows/create-documentation-issue.yml
@@ -0,0 +1,41 @@
+name: Create Documentation Issue
+on:
+  pull_request:
+    types:
+      - labeled
+env:
+  PR_NUMBER: ${{ github.event.number }}
+
+jobs:
+  create-issue:
+    if: ${{ github.event.label.name == 'needs-documentation' }}
+    runs-on: ubuntu-latest
+    name: Create Documentation Issue
+    steps:
+      - name: GitHub App token
+        id: github_app_token
+        uses: tibdex/github-app-token@v1.5.0
+        with:
+          app_id: ${{ secrets.APP_ID }}
+          private_key: ${{ secrets.APP_PRIVATE_KEY }}
+          installation_id: 22958780
+
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      - name: Edit the issue template
+        run: |
+          echo "https://github.com/opensearch-project/OpenSearch/pull/${{ env.PR_NUMBER }}." >> ./.ci/documentation/issue.md
+
+      - name: Create Issue From File
+        id: create-issue
+        uses: peter-evans/create-issue-from-file@v4
+        with:
+          title: Add documentation related to new feature
+          content-filepath: ./.ci/documentation/issue.md
+          labels: documentation
+          repository: opensearch-project/documentation-website
+          token: ${{ steps.github_app_token.outputs.token }}
+
+      - name: Print Issue
+        run: echo Created related documentation issue ${{ steps.create-issue.outputs.issue-number }}

From 896b97e54df4e192a15994a1076a9d1d04312f74 Mon Sep 17 00:00:00 2001
From: Andriy Redko
Date: Tue, 19 Apr 2022 12:34:42 -0400
Subject: [PATCH 26/41] Remove binary (class) files from the repository (#2969)

Signed-off-by: Andriy Redko
---
 .../missingdoclet/MissingDoclet.class | Bin 14156 -> 0 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class

diff --git a/doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class b/doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class
deleted file mode 100644
index 8729def18e4b3eda7184b1e3e49f1bf055d152db..0000000000000000000000000000000000000000
Binary files a/doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class and /dev/null differ

From ede26d945f06522947e1077c8d0b30c1537ae688 Mon Sep 17 00:00:00 2001
From: Andriy Redko
Date: Tue, 19 Apr 2022 14:16:13 -0400
Subject: [PATCH 27/41] Add Gradle 7.4 Aggregated Test Reports (#2821)

Signed-off-by: Andriy Redko
---
 build.gradle                | 24 +++++++++++
 gradle/code-coverage.gradle | 83 +++++------------------------------
 2 files changed, 38 insertions(+), 69 deletions(-)

diff --git a/build.gradle b/build.gradle
index 7949872a85b86..ae2247bb865d1 100644
--- a/build.gradle
+++ b/build.gradle
@@ -50,6 +50,8 @@ plugins {
   id 'opensearch.global-build-info'
   id "com.diffplug.spotless" version "6.4.2" apply false
   id "org.gradle.test-retry" version "1.3.2" apply false
+  id "test-report-aggregation"
+  id 'jacoco-report-aggregation'
 }

 apply from: 'gradle/build-complete.gradle'
@@ -385,6 +387,15 @@ gradle.projectsEvaluated {
       }
     }
   }
+
+  dependencies {
+    subprojects.findAll { it.pluginManager.hasPlugin('java') }.forEach {
+      testReportAggregation it
+    }
+    subprojects.findAll { it.pluginManager.hasPlugin('jacoco') }.forEach {
+      jacocoAggregation it
+    }
+  }
 }

 // test retry configuration
@@ -402,6 +413,7 @@ subprojects {
 // eclipse configuration
 allprojects {
   apply plugin: 'eclipse'
+
   // Name all the non-root projects after their path so that paths get grouped together when imported into eclipse.
   if (path != ':') {
     eclipse.project.name = path
@@ -558,3 +570,15 @@ subprojects {
     }
   }
 }
+
+reporting {
+  reports {
+    testAggregateTestReport(AggregateTestReport) {
+      testType = TestSuiteType.UNIT_TEST
+    }
+  }
+}
+
+tasks.named(JavaBasePlugin.CHECK_TASK_NAME) {
+  dependsOn tasks.named('testAggregateTestReport', TestReport)
+}

diff --git a/gradle/code-coverage.gradle b/gradle/code-coverage.gradle
index de041eae7b72d..61719282c1ca2 100644
--- a/gradle/code-coverage.gradle
+++ b/gradle/code-coverage.gradle
@@ -10,92 +10,37 @@ apply plugin: 'jacoco'

 repositories {
   mavenCentral()
+  gradlePluginPortal()
 }

 allprojects {
   plugins.withId('jacoco') {
-    // The default JaCoCo version in Gradle 6.6.1 is 0.8.5, but at least version 0.8.6 officially supports Java 14
-    jacoco.toolVersion = '0.8.7'
+    jacoco.toolVersion = '0.8.8'
   }
 }

-def codeCoverageReportTask = tasks.register("codeCoverageReport", JacocoReport) {
-  description = 'Generates aggregate report from all subprojects.'
- executionData.setFrom fileTree(dir: '.', include: '**/build/jacoco/*.exec') - dependsOn subprojects.findAll(s -> s.tasks.findByName('check') != null).check -} - -tasks.register("codeCoverageReportForUnitTest", JacocoReport) { - description = 'Generates aggregate report from all subprojects for unit test.' - executionData.setFrom fileTree(dir: '.', include: '**/build/jacoco/test.exec') -} - -tasks.register("codeCoverageReportForIntegrationTest", JacocoReport) { - description = 'Generates aggregate report from all subprojects for integration test.' - // These kinds of tests are integration test, and the tests can be ran by Gradle tasks with the same name - def integrationTestExecPathList = ['**/build/jacoco/integTest.exec', - '**/build/jacoco/internalClusterTest.exec', - '**/build/jacoco/javaRestTest.exec', - '**/build/jacoco/yamlRestTest.exec' ] - executionData.setFrom fileTree(dir: '.', include: integrationTestExecPathList) -} - tasks.withType(JacocoReport).configureEach { group = JavaBasePlugin.VERIFICATION_GROUP - // Select projects with corresponding tests in order to run proper tests and select proper classes to generate the report - def projectsWithJavaPlugin = subprojects.findAll { it.pluginManager.hasPlugin('java') } - def projectsWithUnitTest = projectsWithJavaPlugin.findAll { it.tasks.findByName('test').enabled } - def projectsWithIntegTest = projectsWithJavaPlugin.findAll {it.tasks.findByName('integTest')} - def projectsWithAsyncIntegTest = projectsWithJavaPlugin.findAll {it.tasks.findByName('asyncIntegTest')} - def projectsWithInternalClusterTest = projectsWithJavaPlugin.findAll {it.tasks.findByName('internalClusterTest')} - def projectsWithPooledInternalClusterTest = projectsWithJavaPlugin.findAll {it.tasks.findByName('pooledInternalClusterTest')} - def projectsWithJavaRestTest = projectsWithJavaPlugin.findAll {it.tasks.findByName('javaRestTest')} - def projectsWithYamlRestTest = projectsWithJavaPlugin.findAll {it.tasks.findByName('yamlRestTest')} - def projectsWithIntegrationTest = projectsWithIntegTest + projectsWithAsyncIntegTest + projectsWithInternalClusterTest + projectsWithPooledInternalClusterTest + projectsWithJavaRestTest + projectsWithYamlRestTest - def projectsWithTest = projectsWithUnitTest + projectsWithIntegrationTest - - def selectedProjects - switch (name) { - case "codeCoverageReportForUnitTest": - dependsOn projectsWithUnitTest.test - selectedProjects = projectsWithUnitTest - break - case "codeCoverageReportForIntegrationTest": - dependsOn projectsWithIntegTest.integTest - dependsOn projectsWithAsyncIntegTest.asyncIntegTest - dependsOn projectsWithInternalClusterTest.internalClusterTest - dependsOn projectsWithPooledInternalClusterTest.pooledInternalClusterTest - dependsOn projectsWithJavaRestTest.javaRestTest - dependsOn projectsWithYamlRestTest.yamlRestTest - selectedProjects = projectsWithIntegrationTest - break - default: - dependsOn projectsWithUnitTest.test - dependsOn projectsWithIntegTest.integTest - dependsOn projectsWithAsyncIntegTest.asyncIntegTest - dependsOn projectsWithInternalClusterTest.internalClusterTest - dependsOn projectsWithPooledInternalClusterTest.pooledInternalClusterTest - dependsOn projectsWithJavaRestTest.javaRestTest - dependsOn projectsWithYamlRestTest.yamlRestTest - selectedProjects = projectsWithJavaPlugin - break - } - - sourceDirectories.setFrom files(selectedProjects.sourceSets.main.allSource.srcDirs) - classDirectories.setFrom files(selectedProjects.sourceSets.main.output) - reports { // Code coverage report in HTML and 
CSV formats are on demand, in case they take extra disk space.
-    xml.getRequired().set(System.getProperty('tests.coverage.report.xml', 'true').toBoolean())
-    html.getRequired().set(System.getProperty('tests.coverage.report.html', 'false').toBoolean())
-    csv.getRequired().set(System.getProperty('tests.coverage.report.csv', 'false').toBoolean())
+    xml.required = System.getProperty('tests.coverage.report.xml', 'true').toBoolean()
+    html.required = System.getProperty('tests.coverage.report.html', 'false').toBoolean()
+    csv.required = System.getProperty('tests.coverage.report.csv', 'false').toBoolean()
   }
 }

 if (System.getProperty("tests.coverage")) {
+  reporting {
+    reports {
+      testCodeCoverageReport(JacocoCoverageReport) {
+        testType = TestSuiteType.UNIT_TEST
+      }
+    }
+  }
+
   // Attach code coverage report task to Gradle check task
   project.getTasks().named(JavaBasePlugin.CHECK_TASK_NAME).configure {
-    dependsOn codeCoverageReportTask
+    dependsOn tasks.named('testCodeCoverageReport', JacocoReport)
   }
 }

From 18f4495f4d0ad5f6c6f0fa68447f43fb7544514d Mon Sep 17 00:00:00 2001
From: Tianli Feng
Date: Tue, 19 Apr 2022 11:52:02 -0700
Subject: [PATCH 28/41] Replace parameter 'master_timeout' with 'cluster_manager_timeout' in RequestConverters of High-Level-Rest-Client (#2683)

The REST API request parameter "master_timeout" will be deprecated in server version 3.0, and removed in 4.0. The alternative parameter "cluster_manager_timeout" is added in server 2.0. With the change in this commit, the High-Level-Rest-Client is no longer compatible with OpenSearch server 1.x (and below).

- Use parameter `cluster_manager_timeout` instead of `master_timeout` in the High-Level-Rest-Client `RequestConverters` class for building REST requests
- Modify the corresponding unit tests
- Rename "master timeout" to "cluster manager timeout" in internal method and class names
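As a usage-level illustration (a sketch under assumptions, not part of the diff): the caller-facing setters are unchanged, and only the query parameter emitted on the wire differs. The index name below is hypothetical, and the snippet assumes the TimedRequest setter names as of this commit.

import org.opensearch.client.indices.CreateIndexRequest;
import org.opensearch.common.unit.TimeValue;

// Sketch: the client API stays the same; after this commit the request
// converter serializes the timeout as "cluster_manager_timeout=30s" on the
// request URL instead of the deprecated "master_timeout=30s".
public class ClusterManagerTimeoutExample {
    public static void main(String[] args) {
        CreateIndexRequest request = new CreateIndexRequest("my-index"); // hypothetical index name
        request.setMasterTimeout(TimeValue.timeValueSeconds(30));
    }
}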
Signed-off-by: Tianli Feng --- .../client/ClusterRequestConverters.java | 14 +++--- .../client/IndicesRequestConverters.java | 46 +++++++++---------- .../client/IngestRequestConverters.java | 6 +-- .../opensearch/client/RequestConverters.java | 14 ++++-- .../client/SnapshotRequestConverters.java | 22 ++++----- .../client/ClusterRequestConvertersTests.java | 14 +++--- .../client/IndicesRequestConvertersTests.java | 34 +++++++------- .../client/IngestRequestConvertersTests.java | 6 +-- .../client/RequestConvertersTests.java | 34 +++++++------- .../SnapshotRequestConvertersTests.java | 20 ++++---- 10 files changed, 109 insertions(+), 101 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java index 1cf52ac4169ba..da90521512dea 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java @@ -58,7 +58,7 @@ static Request clusterPutSettings(ClusterUpdateSettingsRequest clusterUpdateSett RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(clusterUpdateSettingsRequest.timeout()); - parameters.withMasterTimeout(clusterUpdateSettingsRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(clusterUpdateSettingsRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(clusterUpdateSettingsRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; @@ -69,7 +69,7 @@ static Request clusterGetSettings(ClusterGetSettingsRequest clusterGetSettingsRe RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withLocal(clusterGetSettingsRequest.local()); parameters.withIncludeDefaults(clusterGetSettingsRequest.includeDefaults()); - parameters.withMasterTimeout(clusterGetSettingsRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(clusterGetSettingsRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); return request; } @@ -88,7 +88,7 @@ static Request clusterHealth(ClusterHealthRequest healthRequest) { .withWaitForNodes(healthRequest.waitForNodes()) .withWaitForEvents(healthRequest.waitForEvents()) .withTimeout(healthRequest.timeout()) - .withMasterTimeout(healthRequest.masterNodeTimeout()) + .withClusterManagerTimeout(healthRequest.masterNodeTimeout()) .withLocal(healthRequest.local()) .withLevel(healthRequest.level()); request.addParameters(params.asMap()); @@ -105,7 +105,7 @@ static Request putComponentTemplate(PutComponentTemplateRequest putComponentTemp .build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(putComponentTemplateRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(putComponentTemplateRequest.masterNodeTimeout()); if (putComponentTemplateRequest.create()) { params.putParam("create", Boolean.TRUE.toString()); } @@ -124,7 +124,7 @@ static Request getComponentTemplates(GetComponentTemplatesRequest getComponentTe final Request request = new Request(HttpGet.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getComponentTemplatesRequest.isLocal()); - params.withMasterTimeout(getComponentTemplatesRequest.getMasterNodeTimeout()); + 
params.withClusterManagerTimeout(getComponentTemplatesRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -136,7 +136,7 @@ static Request componentTemplatesExist(ComponentTemplatesExistRequest componentT final Request request = new Request(HttpHead.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(componentTemplatesRequest.isLocal()); - params.withMasterTimeout(componentTemplatesRequest.getMasterNodeTimeout()); + params.withClusterManagerTimeout(componentTemplatesRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -146,7 +146,7 @@ static Request deleteComponentTemplate(DeleteComponentTemplateRequest deleteComp String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_component_template").addPathPart(name).build(); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(deleteComponentTemplateRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(deleteComponentTemplateRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java index c50ea58982e4e..4bd2f57e6b998 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java @@ -119,7 +119,7 @@ static Request deleteIndex(DeleteIndexRequest deleteIndexRequest) { RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(deleteIndexRequest.timeout()); - parameters.withMasterTimeout(deleteIndexRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(deleteIndexRequest.masterNodeTimeout()); parameters.withIndicesOptions(deleteIndexRequest.indicesOptions()); request.addParameters(parameters.asMap()); return request; @@ -131,7 +131,7 @@ static Request openIndex(OpenIndexRequest openIndexRequest) { RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(openIndexRequest.timeout()); - parameters.withMasterTimeout(openIndexRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(openIndexRequest.masterNodeTimeout()); parameters.withWaitForActiveShards(openIndexRequest.waitForActiveShards()); parameters.withIndicesOptions(openIndexRequest.indicesOptions()); request.addParameters(parameters.asMap()); @@ -144,7 +144,7 @@ static Request closeIndex(CloseIndexRequest closeIndexRequest) { RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(closeIndexRequest.timeout()); - parameters.withMasterTimeout(closeIndexRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(closeIndexRequest.masterNodeTimeout()); parameters.withIndicesOptions(closeIndexRequest.indicesOptions()); request.addParameters(parameters.asMap()); return request; @@ -156,7 +156,7 @@ static Request createIndex(CreateIndexRequest createIndexRequest) throws IOExcep RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(createIndexRequest.timeout()); - parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); + 
parameters.withClusterManagerTimeout(createIndexRequest.masterNodeTimeout()); parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(createIndexRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); @@ -168,7 +168,7 @@ static Request updateAliases(IndicesAliasesRequest indicesAliasesRequest) throws RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(indicesAliasesRequest.timeout()); - parameters.withMasterTimeout(indicesAliasesRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(indicesAliasesRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(indicesAliasesRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; @@ -179,7 +179,7 @@ static Request putMapping(PutMappingRequest putMappingRequest) throws IOExceptio RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(putMappingRequest.timeout()); - parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(putMappingRequest.masterNodeTimeout()); parameters.withIndicesOptions(putMappingRequest.indicesOptions()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(putMappingRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); @@ -192,7 +192,7 @@ static Request getMappings(GetMappingsRequest getMappingsRequest) { Request request = new Request(HttpGet.METHOD_NAME, RequestConverters.endpoint(indices, "_mapping")); RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(getMappingsRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(getMappingsRequest.masterNodeTimeout()); parameters.withIndicesOptions(getMappingsRequest.indicesOptions()); parameters.withLocal(getMappingsRequest.local()); request.addParameters(parameters.asMap()); @@ -332,7 +332,7 @@ private static Request resize(ResizeRequest resizeRequest, ResizeType type) thro RequestConverters.Params params = new RequestConverters.Params(); params.withTimeout(resizeRequest.timeout()); - params.withMasterTimeout(resizeRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(resizeRequest.masterNodeTimeout()); params.withWaitForActiveShards(resizeRequest.getWaitForActiveShards()); request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); @@ -349,7 +349,7 @@ private static Request resize(org.opensearch.action.admin.indices.shrink.ResizeR RequestConverters.Params params = new RequestConverters.Params(); params.withTimeout(resizeRequest.timeout()); - params.withMasterTimeout(resizeRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(resizeRequest.masterNodeTimeout()); params.withWaitForActiveShards(resizeRequest.getTargetIndexRequest().waitForActiveShards()); request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(resizeRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); @@ -365,7 +365,7 @@ static Request rollover(RolloverRequest rolloverRequest) throws IOException { RequestConverters.Params params = new RequestConverters.Params(); params.withTimeout(rolloverRequest.timeout()); - params.withMasterTimeout(rolloverRequest.masterNodeTimeout()); + 
params.withClusterManagerTimeout(rolloverRequest.masterNodeTimeout()); params.withWaitForActiveShards(rolloverRequest.getCreateIndexRequest().waitForActiveShards()); if (rolloverRequest.isDryRun()) { params.putParam("dry_run", Boolean.TRUE.toString()); @@ -386,7 +386,7 @@ static Request getSettings(GetSettingsRequest getSettingsRequest) { params.withIndicesOptions(getSettingsRequest.indicesOptions()); params.withLocal(getSettingsRequest.local()); params.withIncludeDefaults(getSettingsRequest.includeDefaults()); - params.withMasterTimeout(getSettingsRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(getSettingsRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -402,7 +402,7 @@ static Request getIndex(GetIndexRequest getIndexRequest) { params.withLocal(getIndexRequest.local()); params.withIncludeDefaults(getIndexRequest.includeDefaults()); params.withHuman(getIndexRequest.humanReadable()); - params.withMasterTimeout(getIndexRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(getIndexRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -429,7 +429,7 @@ static Request indexPutSettings(UpdateSettingsRequest updateSettingsRequest) thr RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(updateSettingsRequest.timeout()); - parameters.withMasterTimeout(updateSettingsRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(updateSettingsRequest.masterNodeTimeout()); parameters.withIndicesOptions(updateSettingsRequest.indicesOptions()); parameters.withPreserveExisting(updateSettingsRequest.isPreserveExisting()); request.addParameters(parameters.asMap()); @@ -443,7 +443,7 @@ static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) thro .build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(putIndexTemplateRequest.masterNodeTimeout()); if (putIndexTemplateRequest.create()) { params.putParam("create", Boolean.TRUE.toString()); } @@ -461,7 +461,7 @@ static Request putIndexTemplate(PutComposableIndexTemplateRequest putIndexTempla .build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(putIndexTemplateRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(putIndexTemplateRequest.masterNodeTimeout()); if (putIndexTemplateRequest.create()) { params.putParam("create", Boolean.TRUE.toString()); } @@ -479,7 +479,7 @@ static Request simulateIndexTemplate(SimulateIndexTemplateRequest simulateIndexT .build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(simulateIndexTemplateRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(simulateIndexTemplateRequest.masterNodeTimeout()); PutComposableIndexTemplateRequest putComposableIndexTemplateRequest = simulateIndexTemplateRequest.indexTemplateV2Request(); if (putComposableIndexTemplateRequest != null) { if (putComposableIndexTemplateRequest.create()) { @@ -529,7 +529,7 @@ static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) { final Request request = new Request(HttpGet.METHOD_NAME, endpoint); final RequestConverters.Params params = new 
RequestConverters.Params(); params.withLocal(getIndexTemplatesRequest.isLocal()); - params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); + params.withClusterManagerTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -541,7 +541,7 @@ static Request getIndexTemplates(GetComposableIndexTemplateRequest getIndexTempl final Request request = new Request(HttpGet.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(getIndexTemplatesRequest.isLocal()); - params.withMasterTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); + params.withClusterManagerTimeout(getIndexTemplatesRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -553,7 +553,7 @@ static Request templatesExist(IndexTemplatesExistRequest indexTemplatesExistRequ final Request request = new Request(HttpHead.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(indexTemplatesExistRequest.isLocal()); - params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout()); + params.withClusterManagerTimeout(indexTemplatesExistRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -565,7 +565,7 @@ static Request templatesExist(ComposableIndexTemplateExistRequest indexTemplates final Request request = new Request(HttpHead.METHOD_NAME, endpoint); final RequestConverters.Params params = new RequestConverters.Params(); params.withLocal(indexTemplatesExistRequest.isLocal()); - params.withMasterTimeout(indexTemplatesExistRequest.getMasterNodeTimeout()); + params.withClusterManagerTimeout(indexTemplatesExistRequest.getMasterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -587,7 +587,7 @@ static Request deleteTemplate(DeleteIndexTemplateRequest deleteIndexTemplateRequ String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_template").addPathPart(name).build(); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(deleteIndexTemplateRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -597,7 +597,7 @@ static Request deleteIndexTemplate(DeleteComposableIndexTemplateRequest deleteIn String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_index_template").addPathPart(name).build(); Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(deleteIndexTemplateRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(deleteIndexTemplateRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -610,7 +610,7 @@ static Request deleteAlias(DeleteAliasRequest deleteAliasRequest) { Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(deleteAliasRequest.timeout()); - parameters.withMasterTimeout(deleteAliasRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(deleteAliasRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); return request; } diff --git 
a/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java index e2ede61f38ee9..829f6cf0bbba4 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java @@ -54,7 +54,7 @@ static Request getPipeline(GetPipelineRequest getPipelineRequest) { Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(getPipelineRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(getPipelineRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); return request; } @@ -67,7 +67,7 @@ static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOExcep RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(putPipelineRequest.timeout()); - parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(putPipelineRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(putPipelineRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; @@ -81,7 +81,7 @@ static Request deletePipeline(DeletePipelineRequest deletePipelineRequest) { RequestConverters.Params parameters = new RequestConverters.Params(); parameters.withTimeout(deletePipelineRequest.timeout()); - parameters.withMasterTimeout(deletePipelineRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(deletePipelineRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); return request; } diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index 3e43963db519f..7a6227a7c2ec2 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -702,7 +702,7 @@ static Request putScript(PutStoredScriptRequest putStoredScriptRequest) throws I Request request = new Request(HttpPost.METHOD_NAME, endpoint); Params params = new Params(); params.withTimeout(putStoredScriptRequest.timeout()); - params.withMasterTimeout(putStoredScriptRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(putStoredScriptRequest.masterNodeTimeout()); if (Strings.hasText(putStoredScriptRequest.context())) { params.putParam("context", putStoredScriptRequest.context()); } @@ -757,7 +757,7 @@ static Request getScript(GetStoredScriptRequest getStoredScriptRequest) { String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build(); Request request = new Request(HttpGet.METHOD_NAME, endpoint); Params params = new Params(); - params.withMasterTimeout(getStoredScriptRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(getStoredScriptRequest.masterNodeTimeout()); request.addParameters(params.asMap()); return request; } @@ -767,7 +767,7 @@ static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) Request request = new Request(HttpDelete.METHOD_NAME, endpoint); Params params = new Params(); params.withTimeout(deleteStoredScriptRequest.timeout()); - 
params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout());
+        params.withClusterManagerTimeout(deleteStoredScriptRequest.masterNodeTimeout());
         request.addParameters(params.asMap());
         return request;
     }
@@ -891,10 +891,18 @@ Params withFields(String[] fields) {
             return this;
         }

+        /**
+         * @deprecated As of 2.0, to support inclusive language, replaced by {@link #withClusterManagerTimeout(TimeValue)}
+         */
+        @Deprecated
         Params withMasterTimeout(TimeValue masterTimeout) {
             return putParam("master_timeout", masterTimeout);
         }

+        Params withClusterManagerTimeout(TimeValue clusterManagerTimeout) {
+            return putParam("cluster_manager_timeout", clusterManagerTimeout);
+        }
+
         Params withPipeline(String pipeline) {
             return putParam("pipeline", pipeline);
         }

diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java
index 3c92bb5ec2ab8..3b2c72266a30b 100644
--- a/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java
@@ -63,7 +63,7 @@ static Request getRepositories(GetRepositoriesRequest getRepositoriesRequest) {
         Request request = new Request(HttpGet.METHOD_NAME, endpoint);
         RequestConverters.Params parameters = new RequestConverters.Params();
-        parameters.withMasterTimeout(getRepositoriesRequest.masterNodeTimeout());
+        parameters.withClusterManagerTimeout(getRepositoriesRequest.masterNodeTimeout());
         parameters.withLocal(getRepositoriesRequest.local());
         request.addParameters(parameters.asMap());
         return request;
@@ -74,7 +74,7 @@ static Request createRepository(PutRepositoryRequest putRepositoryRequest) throw
         Request request = new Request(HttpPut.METHOD_NAME, endpoint);
         RequestConverters.Params parameters = new RequestConverters.Params();
-        parameters.withMasterTimeout(putRepositoryRequest.masterNodeTimeout());
+        parameters.withClusterManagerTimeout(putRepositoryRequest.masterNodeTimeout());
         parameters.withTimeout(putRepositoryRequest.timeout());
         if (putRepositoryRequest.verify() == false) {
             parameters.putParam("verify", "false");
@@ -91,7 +91,7 @@ static Request deleteRepository(DeleteRepositoryRequest deleteRepositoryRequest)
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
         RequestConverters.Params parameters = new RequestConverters.Params();
-        parameters.withMasterTimeout(deleteRepositoryRequest.masterNodeTimeout());
+        parameters.withClusterManagerTimeout(deleteRepositoryRequest.masterNodeTimeout());
         parameters.withTimeout(deleteRepositoryRequest.timeout());
         request.addParameters(parameters.asMap());
         return request;
@@ -105,7 +105,7 @@ static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest)
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
         RequestConverters.Params parameters = new RequestConverters.Params();
-        parameters.withMasterTimeout(verifyRepositoryRequest.masterNodeTimeout());
+        parameters.withClusterManagerTimeout(verifyRepositoryRequest.masterNodeTimeout());
         parameters.withTimeout(verifyRepositoryRequest.timeout());
         request.addParameters(parameters.asMap());
         return request;
@@ -119,7 +119,7 @@ static Request cleanupRepository(CleanupRepositoryRequest cleanupRepositoryReque
         Request request = new Request(HttpPost.METHOD_NAME, endpoint);
         RequestConverters.Params parameters = new RequestConverters.Params();
-
parameters.withMasterTimeout(cleanupRepositoryRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(cleanupRepositoryRequest.masterNodeTimeout()); parameters.withTimeout(cleanupRepositoryRequest.timeout()); request.addParameters(parameters.asMap()); return request; @@ -132,7 +132,7 @@ static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throw .build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(createSnapshotRequest.masterNodeTimeout()); params.withWaitForCompletion(createSnapshotRequest.waitForCompletion()); request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(createSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); @@ -148,7 +148,7 @@ static Request cloneSnapshot(CloneSnapshotRequest cloneSnapshotRequest) throws I .build(); Request request = new Request(HttpPut.METHOD_NAME, endpoint); RequestConverters.Params params = new RequestConverters.Params(); - params.withMasterTimeout(cloneSnapshotRequest.masterNodeTimeout()); + params.withClusterManagerTimeout(cloneSnapshotRequest.masterNodeTimeout()); request.addParameters(params.asMap()); request.setEntity(RequestConverters.createEntity(cloneSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); return request; @@ -167,7 +167,7 @@ static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) { Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(getSnapshotsRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(getSnapshotsRequest.masterNodeTimeout()); parameters.putParam("ignore_unavailable", Boolean.toString(getSnapshotsRequest.ignoreUnavailable())); parameters.putParam("verbose", Boolean.toString(getSnapshotsRequest.verbose())); request.addParameters(parameters.asMap()); @@ -183,7 +183,7 @@ static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) { Request request = new Request(HttpGet.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(snapshotsStatusRequest.masterNodeTimeout()); parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable()); request.addParameters(parameters.asMap()); return request; @@ -197,7 +197,7 @@ static Request restoreSnapshot(RestoreSnapshotRequest restoreSnapshotRequest) th .build(); Request request = new Request(HttpPost.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(restoreSnapshotRequest.masterNodeTimeout()); + parameters.withClusterManagerTimeout(restoreSnapshotRequest.masterNodeTimeout()); parameters.withWaitForCompletion(restoreSnapshotRequest.waitForCompletion()); request.addParameters(parameters.asMap()); request.setEntity(RequestConverters.createEntity(restoreSnapshotRequest, RequestConverters.REQUEST_BODY_CONTENT_TYPE)); @@ -212,7 +212,7 @@ static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { Request request = new Request(HttpDelete.METHOD_NAME, endpoint); RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(deleteSnapshotRequest.masterNodeTimeout()); + 
parameters.withClusterManagerTimeout(deleteSnapshotRequest.masterNodeTimeout()); request.addParameters(parameters.asMap()); return request; } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java index 2af164a51dbab..ed0a973081b62 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java @@ -61,7 +61,7 @@ public class ClusterRequestConvertersTests extends OpenSearchTestCase { public void testClusterPutSettings() throws IOException { ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest(); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(request, expectedParams); RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); Request expectedRequest = ClusterRequestConverters.clusterPutSettings(request); @@ -73,7 +73,7 @@ public void testClusterPutSettings() throws IOException { public void testClusterGetSettings() throws IOException { ClusterGetSettingsRequest request = new ClusterGetSettingsRequest(); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(request, expectedParams); request.includeDefaults(OpenSearchTestCase.randomBoolean()); if (request.includeDefaults()) { expectedParams.put("include_defaults", String.valueOf(true)); @@ -96,23 +96,23 @@ public void testClusterHealth() { case "timeout": healthRequest.timeout(timeout); expectedParams.put("timeout", timeout); - // If Master Timeout wasn't set it uses the same value as Timeout - expectedParams.put("master_timeout", timeout); + // If Cluster Manager Timeout wasn't set it uses the same value as Timeout + expectedParams.put("cluster_manager_timeout", timeout); break; case "masterTimeout": expectedParams.put("timeout", "30s"); healthRequest.masterNodeTimeout(masterTimeout); - expectedParams.put("master_timeout", masterTimeout); + expectedParams.put("cluster_manager_timeout", masterTimeout); break; case "both": healthRequest.timeout(timeout); expectedParams.put("timeout", timeout); healthRequest.masterNodeTimeout(timeout); - expectedParams.put("master_timeout", timeout); + expectedParams.put("cluster_manager_timeout", timeout); break; case "none": expectedParams.put("timeout", "30s"); - expectedParams.put("master_timeout", "30s"); + expectedParams.put("cluster_manager_timeout", "30s"); break; default: throw new UnsupportedOperationException(); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java index f853378e789fa..bf6d6c922fdd7 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java @@ -144,7 +144,7 @@ public void testCreateIndex() throws IOException { Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomTimeout(createIndexRequest, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - 
RequestConvertersTests.setRandomMasterTimeout(createIndexRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(createIndexRequest, expectedParams); RequestConvertersTests.setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams); Request request = IndicesRequestConverters.createIndex(createIndexRequest); @@ -166,7 +166,7 @@ public void testUpdateAliases() throws IOException { Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomTimeout(indicesAliasesRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(indicesAliasesRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(indicesAliasesRequest, expectedParams); Request request = IndicesRequestConverters.updateAliases(indicesAliasesRequest); Assert.assertEquals("/_aliases", request.getEndpoint()); @@ -180,7 +180,7 @@ public void testPutMapping() throws IOException { Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomTimeout(putMappingRequest, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(putMappingRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(putMappingRequest, expectedParams); RequestConvertersTests.setRandomIndicesOptions( putMappingRequest::indicesOptions, putMappingRequest::indicesOptions, @@ -219,7 +219,7 @@ public void testGetMapping() { getMappingRequest::indicesOptions, expectedParams ); - RequestConvertersTests.setRandomMasterTimeout(getMappingRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(getMappingRequest, expectedParams); RequestConvertersTests.setRandomLocal(getMappingRequest::local, expectedParams); Request request = IndicesRequestConverters.getMappings(getMappingRequest); @@ -313,7 +313,7 @@ public void testDeleteIndex() { Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomTimeout(deleteIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(deleteIndexRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(deleteIndexRequest, expectedParams); RequestConvertersTests.setRandomIndicesOptions( deleteIndexRequest::indicesOptions, @@ -334,7 +334,7 @@ public void testGetSettings() throws IOException { GetSettingsRequest getSettingsRequest = new GetSettingsRequest().indices(indicesUnderTest); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(getSettingsRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(getSettingsRequest, expectedParams); RequestConvertersTests.setRandomIndicesOptions( getSettingsRequest::indicesOptions, getSettingsRequest::indicesOptions, @@ -385,7 +385,7 @@ public void testGetIndex() throws IOException { GetIndexRequest getIndexRequest = new GetIndexRequest(indicesUnderTest); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(getIndexRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(getIndexRequest, expectedParams); RequestConvertersTests.setRandomIndicesOptions(getIndexRequest::indicesOptions, getIndexRequest::indicesOptions, expectedParams); RequestConvertersTests.setRandomLocal(getIndexRequest::local, expectedParams); RequestConvertersTests.setRandomHumanReadable(getIndexRequest::humanReadable, expectedParams); @@ -425,7 +425,7 @@ public void 
testOpenIndex() { Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomTimeout(openIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(openIndexRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(openIndexRequest, expectedParams); RequestConvertersTests.setRandomIndicesOptions(openIndexRequest::indicesOptions, openIndexRequest::indicesOptions, expectedParams); RequestConvertersTests.setRandomWaitForActiveShards(openIndexRequest::waitForActiveShards, expectedParams); @@ -453,7 +453,7 @@ public void testCloseIndex() { AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams ); - RequestConvertersTests.setRandomMasterTimeout(closeIndexRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(closeIndexRequest, expectedParams); RequestConvertersTests.setRandomIndicesOptions( closeIndexRequest::indicesOptions, closeIndexRequest::indicesOptions, @@ -680,7 +680,7 @@ private void resizeTest(ResizeType resizeType, CheckedFunction expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(resizeRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(resizeRequest, expectedParams); RequestConvertersTests.setRandomTimeout( s -> resizeRequest.setTimeout(TimeValue.parseTimeValue(s, "timeout")), resizeRequest.timeout(), @@ -723,7 +723,7 @@ public void testRollover() throws IOException { ); Map expectedParams = new HashMap<>(); RequestConvertersTests.setRandomTimeout(rolloverRequest, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - RequestConvertersTests.setRandomMasterTimeout(rolloverRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(rolloverRequest, expectedParams); if (OpenSearchTestCase.randomBoolean()) { rolloverRequest.dryRun(OpenSearchTestCase.randomBoolean()); if (rolloverRequest.isDryRun()) { @@ -796,7 +796,7 @@ public void testIndexPutSettings() throws IOException { String[] indices = OpenSearchTestCase.randomBoolean() ? 
null : RequestConvertersTests.randomIndicesNames(0, 2); UpdateSettingsRequest updateSettingsRequest = new UpdateSettingsRequest(indices); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(updateSettingsRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(updateSettingsRequest, expectedParams); RequestConvertersTests.setRandomTimeout(updateSettingsRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); RequestConvertersTests.setRandomIndicesOptions( updateSettingsRequest::indicesOptions, @@ -866,7 +866,7 @@ public void testPutTemplateRequest() throws Exception { putTemplateRequest.cause(cause); expectedParams.put("cause", cause); } - RequestConvertersTests.setRandomMasterTimeout(putTemplateRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(putTemplateRequest, expectedParams); Request request = IndicesRequestConverters.putTemplate(putTemplateRequest); Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + names.get(putTemplateRequest.name()))); @@ -917,7 +917,7 @@ public void testGetTemplateRequest() throws Exception { List names = OpenSearchTestCase.randomSubsetOf(1, encodes.keySet()); GetIndexTemplatesRequest getTemplatesRequest = new GetIndexTemplatesRequest(names); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(getTemplatesRequest::setMasterNodeTimeout, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(getTemplatesRequest::setMasterNodeTimeout, expectedParams); RequestConvertersTests.setRandomLocal(getTemplatesRequest::setLocal, expectedParams); Request request = IndicesRequestConverters.getTemplates(getTemplatesRequest); @@ -946,7 +946,7 @@ public void testTemplatesExistRequest() { ); final Map expectedParams = new HashMap<>(); final IndexTemplatesExistRequest indexTemplatesExistRequest = new IndexTemplatesExistRequest(names); - RequestConvertersTests.setRandomMasterTimeout(indexTemplatesExistRequest::setMasterNodeTimeout, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(indexTemplatesExistRequest::setMasterNodeTimeout, expectedParams); RequestConvertersTests.setRandomLocal(indexTemplatesExistRequest::setLocal, expectedParams); assertThat(indexTemplatesExistRequest.names(), equalTo(names)); @@ -973,7 +973,7 @@ public void testDeleteTemplateRequest() { encodes.put("foo^bar", "foo%5Ebar"); DeleteIndexTemplateRequest deleteTemplateRequest = new DeleteIndexTemplateRequest().name(randomFrom(encodes.keySet())); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(deleteTemplateRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(deleteTemplateRequest, expectedParams); Request request = IndicesRequestConverters.deleteTemplate(deleteTemplateRequest); Assert.assertThat(request.getMethod(), equalTo(HttpDelete.METHOD_NAME)); Assert.assertThat(request.getEndpoint(), equalTo("/_template/" + encodes.get(deleteTemplateRequest.name()))); @@ -985,7 +985,7 @@ public void testDeleteAlias() { DeleteAliasRequest deleteAliasRequest = new DeleteAliasRequest(randomAlphaOfLength(4), randomAlphaOfLength(4)); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(deleteAliasRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(deleteAliasRequest, expectedParams); RequestConvertersTests.setRandomTimeout(deleteAliasRequest, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); Request 
request = IndicesRequestConverters.deleteAlias(deleteAliasRequest); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java index 0d95b3e7fddc0..e0c7f69325f87 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/IngestRequestConvertersTests.java @@ -62,7 +62,7 @@ public void testPutPipeline() throws IOException { XContentType.JSON ); Map expectedParams = new HashMap<>(); - RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(request, expectedParams); RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); Request expectedRequest = IngestRequestConverters.putPipeline(request); @@ -78,7 +78,7 @@ public void testGetPipeline() { String pipelineId = "some_pipeline_id"; Map expectedParams = new HashMap<>(); GetPipelineRequest request = new GetPipelineRequest("some_pipeline_id"); - RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(request, expectedParams); Request expectedRequest = IngestRequestConverters.getPipeline(request); StringJoiner endpoint = new StringJoiner("/", "/", ""); endpoint.add("_ingest/pipeline"); @@ -92,7 +92,7 @@ public void testDeletePipeline() { String pipelineId = "some_pipeline_id"; Map expectedParams = new HashMap<>(); DeletePipelineRequest request = new DeletePipelineRequest(pipelineId); - RequestConvertersTests.setRandomMasterTimeout(request, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(request, expectedParams); RequestConvertersTests.setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); Request expectedRequest = IngestRequestConverters.deletePipeline(request); StringJoiner endpoint = new StringJoiner("/", "/", ""); diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java index 32c6cde0725b4..66581fdc42c2b 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java @@ -1663,7 +1663,7 @@ public void testPutScript() throws Exception { } Map expectedParams = new HashMap<>(); - setRandomMasterTimeout(putStoredScriptRequest, expectedParams); + setRandomClusterManagerTimeout(putStoredScriptRequest, expectedParams); setRandomTimeout(putStoredScriptRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); if (randomBoolean()) { @@ -1694,7 +1694,7 @@ public void testAnalyzeRequest() throws Exception { public void testGetScriptRequest() { GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script"); Map expectedParams = new HashMap<>(); - setRandomMasterTimeout(getStoredScriptRequest, expectedParams); + setRandomClusterManagerTimeout(getStoredScriptRequest, expectedParams); Request request = RequestConverters.getScript(getStoredScriptRequest); assertThat(request.getEndpoint(), equalTo("/_scripts/" + getStoredScriptRequest.id())); @@ -1708,7 +1708,7 @@ public void testDeleteScriptRequest() { Map expectedParams = new HashMap<>(); 
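The converter-test hunks in this stretch all pin down the same contract: a request's masterNodeTimeout() value must now be serialized as the cluster_manager_timeout query parameter instead of master_timeout. A minimal sketch of that assertion without the randomized helpers (the script id here is made up, and getParameters() is assumed to behave as on the low-level client's Request):

    DeleteStoredScriptRequest req = new DeleteStoredScriptRequest("my-script"); // hypothetical script id
    req.masterNodeTimeout(TimeValue.timeValueSeconds(30));
    Request http = RequestConverters.deleteScript(req);
    // the timeout now travels under the inclusive parameter name
    assertEquals("30s", http.getParameters().get("cluster_manager_timeout"));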
setRandomTimeout(deleteStoredScriptRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); - setRandomMasterTimeout(deleteStoredScriptRequest, expectedParams); + setRandomClusterManagerTimeout(deleteStoredScriptRequest, expectedParams); Request request = RequestConverters.deleteScript(deleteStoredScriptRequest); assertThat(request.getEndpoint(), equalTo("/_scripts/" + deleteStoredScriptRequest.id())); @@ -2105,34 +2105,34 @@ static void setRandomTimeoutTimeValue(Consumer setter, TimeValue defa } } - static void setRandomMasterTimeout(MasterNodeRequest request, Map expectedParams) { - setRandomMasterTimeout(request::masterNodeTimeout, expectedParams); + static void setRandomClusterManagerTimeout(MasterNodeRequest request, Map expectedParams) { + setRandomClusterManagerTimeout(request::masterNodeTimeout, expectedParams); } - static void setRandomMasterTimeout(TimedRequest request, Map expectedParams) { - setRandomMasterTimeout( + static void setRandomClusterManagerTimeout(TimedRequest request, Map expectedParams) { + setRandomClusterManagerTimeout( s -> request.setMasterTimeout(TimeValue.parseTimeValue(s, request.getClass().getName() + ".masterNodeTimeout")), expectedParams ); } - static void setRandomMasterTimeout(Consumer setter, Map expectedParams) { + static void setRandomClusterManagerTimeout(Consumer setter, Map expectedParams) { if (randomBoolean()) { - String masterTimeout = randomTimeValue(); - setter.accept(masterTimeout); - expectedParams.put("master_timeout", masterTimeout); + String clusterManagerTimeout = randomTimeValue(); + setter.accept(clusterManagerTimeout); + expectedParams.put("cluster_manager_timeout", clusterManagerTimeout); } else { - expectedParams.put("master_timeout", MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT.getStringRep()); + expectedParams.put("cluster_manager_timeout", MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT.getStringRep()); } } - static void setRandomMasterTimeout(Consumer setter, TimeValue defaultTimeout, Map expectedParams) { + static void setRandomClusterManagerTimeout(Consumer setter, TimeValue defaultTimeout, Map expectedParams) { if (randomBoolean()) { - TimeValue masterTimeout = TimeValue.parseTimeValue(randomTimeValue(), "random_master_timeout"); - setter.accept(masterTimeout); - expectedParams.put("master_timeout", masterTimeout.getStringRep()); + TimeValue clusterManagerTimeout = TimeValue.parseTimeValue(randomTimeValue(), "random_master_timeout"); + setter.accept(clusterManagerTimeout); + expectedParams.put("cluster_manager_timeout", clusterManagerTimeout.getStringRep()); } else { - expectedParams.put("master_timeout", defaultTimeout.getStringRep()); + expectedParams.put("cluster_manager_timeout", defaultTimeout.getStringRep()); } } diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java index f18679127bf2b..c75f0cff171f2 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java @@ -70,7 +70,7 @@ public void testGetRepositories() { StringBuilder endpoint = new StringBuilder("/_snapshot"); GetRepositoriesRequest getRepositoriesRequest = new GetRepositoriesRequest(); - RequestConvertersTests.setRandomMasterTimeout(getRepositoriesRequest, expectedParams); + 
RequestConvertersTests.setRandomClusterManagerTimeout(getRepositoriesRequest, expectedParams); RequestConvertersTests.setRandomLocal(getRepositoriesRequest::local, expectedParams); if (randomBoolean()) { @@ -121,7 +121,7 @@ public void testDeleteRepository() { DeleteRepositoryRequest deleteRepositoryRequest = new DeleteRepositoryRequest(); deleteRepositoryRequest.name(repository); - RequestConvertersTests.setRandomMasterTimeout(deleteRepositoryRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(deleteRepositoryRequest, expectedParams); RequestConvertersTests.setRandomTimeout(deleteRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); Request request = SnapshotRequestConverters.deleteRepository(deleteRepositoryRequest); @@ -137,7 +137,7 @@ public void testVerifyRepository() { String endpoint = "/_snapshot/" + repository + "/_verify"; VerifyRepositoryRequest verifyRepositoryRequest = new VerifyRepositoryRequest(repository); - RequestConvertersTests.setRandomMasterTimeout(verifyRepositoryRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(verifyRepositoryRequest, expectedParams); RequestConvertersTests.setRandomTimeout(verifyRepositoryRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); Request request = SnapshotRequestConverters.verifyRepository(verifyRepositoryRequest); @@ -153,7 +153,7 @@ public void testCreateSnapshot() throws IOException { String endpoint = "/_snapshot/" + repository + "/" + snapshot; CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); - RequestConvertersTests.setRandomMasterTimeout(createSnapshotRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(createSnapshotRequest, expectedParams); Boolean waitForCompletion = randomBoolean(); createSnapshotRequest.waitForCompletion(waitForCompletion); @@ -177,7 +177,7 @@ public void testGetSnapshots() { GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(); getSnapshotsRequest.repository(repository); getSnapshotsRequest.snapshots(Arrays.asList(snapshot1, snapshot2).toArray(new String[0])); - RequestConvertersTests.setRandomMasterTimeout(getSnapshotsRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(getSnapshotsRequest, expectedParams); if (randomBoolean()) { boolean ignoreUnavailable = randomBoolean(); @@ -209,7 +209,7 @@ public void testGetAllSnapshots() { String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/_all", repository); GetSnapshotsRequest getSnapshotsRequest = new GetSnapshotsRequest(repository); - RequestConvertersTests.setRandomMasterTimeout(getSnapshotsRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(getSnapshotsRequest, expectedParams); boolean ignoreUnavailable = randomBoolean(); getSnapshotsRequest.ignoreUnavailable(ignoreUnavailable); @@ -238,7 +238,7 @@ public void testSnapshotsStatus() { String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status"; SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots); - RequestConvertersTests.setRandomMasterTimeout(snapshotsStatusRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(snapshotsStatusRequest, expectedParams); snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable); expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); @@ -256,7 +256,7 @@ public void 
testRestoreSnapshot() throws IOException { String endpoint = String.format(Locale.ROOT, "/_snapshot/%s/%s/_restore", repository, snapshot); RestoreSnapshotRequest restoreSnapshotRequest = new RestoreSnapshotRequest(repository, snapshot); - RequestConvertersTests.setRandomMasterTimeout(restoreSnapshotRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(restoreSnapshotRequest, expectedParams); boolean waitForCompletion = randomBoolean(); restoreSnapshotRequest.waitForCompletion(waitForCompletion); expectedParams.put("wait_for_completion", Boolean.toString(waitForCompletion)); @@ -264,7 +264,7 @@ public void testRestoreSnapshot() throws IOException { if (randomBoolean()) { String timeout = randomTimeValue(); restoreSnapshotRequest.masterNodeTimeout(timeout); - expectedParams.put("master_timeout", timeout); + expectedParams.put("cluster_manager_timeout", timeout); } Request request = SnapshotRequestConverters.restoreSnapshot(restoreSnapshotRequest); @@ -284,7 +284,7 @@ public void testDeleteSnapshot() { DeleteSnapshotRequest deleteSnapshotRequest = new DeleteSnapshotRequest(); deleteSnapshotRequest.repository(repository); deleteSnapshotRequest.snapshots(snapshot); - RequestConvertersTests.setRandomMasterTimeout(deleteSnapshotRequest, expectedParams); + RequestConvertersTests.setRandomClusterManagerTimeout(deleteSnapshotRequest, expectedParams); Request request = SnapshotRequestConverters.deleteSnapshot(deleteSnapshotRequest); assertThat(request.getEndpoint(), equalTo(endpoint)); From b45bfc9afc5ae02978d578da543e9a2630e1965e Mon Sep 17 00:00:00 2001 From: Tianli Feng Date: Tue, 19 Apr 2022 13:39:42 -0700 Subject: [PATCH 29/41] Revert "Make High-Level-Rest-Client tests allow deprecation warning temporarily, during deprecation of request parameter 'master_timeout' (#2702)" (#2744) This reverts commit 6a2a33d1872850b04562164c39621698cb99d7b8. During the process of deprecating the REST API request parameter master_timeout and adding the alternative parameter cluster_manager_timeout, I temporarily made the High-Level-Rest-Client tests allow deprecation warnings, by changing the argument of `setStrictDeprecationMode()` to false when building `RestClient` for tests, in the above commit / PR https://github.com/opensearch-project/OpenSearch/pull/2702. This PR sets the High-Level-Rest-Client tests back to treating warning headers as failures. Signed-off-by: Tianli Feng --- .../java/org/opensearch/test/rest/OpenSearchRestTestCase.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java index f976b3619102a..9624a9d3d0554 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/OpenSearchRestTestCase.java @@ -824,8 +824,7 @@ protected String getProtocol() { protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { RestClientBuilder builder = RestClient.builder(hosts); configureClient(builder, settings); - // TODO: set the method argument to 'true' after PR https://github.com/opensearch-project/OpenSearch/pull/2683 merged. 
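For context on the flag being flipped back to true just below: in the low-level REST client, strict deprecation mode makes any response carrying a Warning header fail the request instead of merely logging it. A hedged sketch of the builder call (host and port are placeholders):

    RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http"));
    builder.setStrictDeprecationMode(true); // a Warning response header now fails the request
    RestClient client = builder.build();    // so tests surface deprecation warnings as hard errors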
- builder.setStrictDeprecationMode(false); + builder.setStrictDeprecationMode(true); return builder.build(); } From 6915df94c3a322a4fb7b6808e1f63a743a3af6be Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 20 Apr 2022 15:05:13 -0500 Subject: [PATCH 30/41] [Remove] remaining AllFieldMapper references (#3007) AllFieldMapper was deprecated in legacy 6x. The remaining references are removed, along with the field mapper and corresponding tests. Signed-off-by: Nicholas Walter Knize --- .../index/mapper/AllFieldMapper.java | 134 ------------------ .../indices/mapper/MapperRegistry.java | 10 +- .../index/mapper/AllFieldMapperTests.java | 63 -------- .../indices/IndicesModuleTests.java | 14 -- 4 files changed, 1 insertion(+), 220 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/index/mapper/AllFieldMapper.java delete mode 100644 server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java diff --git a/server/src/main/java/org/opensearch/index/mapper/AllFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/AllFieldMapper.java deleted file mode 100644 index 634424d6f45a4..0000000000000 --- a/server/src/main/java/org/opensearch/index/mapper/AllFieldMapper.java +++ /dev/null @@ -1,134 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.index.mapper; - -import org.apache.lucene.document.FieldType; -import org.apache.lucene.index.IndexOptions; -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.common.Explicit; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.search.lookup.SearchLookup; - -import java.util.Collections; -import java.util.List; - -/** - * Noop mapper that ensures that mappings created in 6x that explicitly disable the _all field - * can be restored in this version. 
- * - * TODO: Remove in 8 - */ -public class AllFieldMapper extends MetadataFieldMapper { - public static final String NAME = "_all"; - public static final String CONTENT_TYPE = "_all"; - - public static class Defaults { - public static final FieldType FIELD_TYPE = new FieldType(); - - static { - FIELD_TYPE.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS); - FIELD_TYPE.setTokenized(true); - FIELD_TYPE.freeze(); - } - } - - private static AllFieldMapper toType(FieldMapper in) { - return (AllFieldMapper) in; - } - - public static class Builder extends MetadataFieldMapper.Builder { - - private final Parameter> enabled = updateableBoolParam("enabled", m -> toType(m).enabled, false); - - public Builder() { - super(NAME); - } - - @Override - protected List> getParameters() { - return Collections.singletonList(enabled); - } - - @Override - public AllFieldMapper build(BuilderContext context) { - if (enabled.getValue().value()) { - throw new IllegalArgumentException("[_all] is disabled in this version."); - } - return new AllFieldMapper(enabled.getValue()); - } - } - - public static final TypeParser PARSER = new ConfigurableTypeParser( - c -> new AllFieldMapper(new Explicit<>(false, false)), - c -> new Builder() - ); - - static final class AllFieldType extends StringFieldType { - AllFieldType() { - super(NAME, false, false, false, TextSearchInfo.NONE, Collections.emptyMap()); - } - - @Override - public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup searchLookup, String format) { - throw new UnsupportedOperationException(); - } - - @Override - public String typeName() { - return CONTENT_TYPE; - } - - @Override - public Query existsQuery(QueryShardContext context) { - return new MatchNoDocsQuery(); - } - } - - private final Explicit enabled; - - private AllFieldMapper(Explicit enabled) { - super(new AllFieldType()); - this.enabled = enabled; - } - - @Override - protected String contentType() { - return CONTENT_TYPE; - } - - @Override - public ParametrizedFieldMapper.Builder getMergeBuilder() { - return new Builder().init(this); - } -} diff --git a/server/src/main/java/org/opensearch/indices/mapper/MapperRegistry.java b/server/src/main/java/org/opensearch/indices/mapper/MapperRegistry.java index d37f82c7a484f..f56b2f98f0f6e 100644 --- a/server/src/main/java/org/opensearch/indices/mapper/MapperRegistry.java +++ b/server/src/main/java/org/opensearch/indices/mapper/MapperRegistry.java @@ -32,9 +32,7 @@ package org.opensearch.indices.mapper; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; -import org.opensearch.index.mapper.AllFieldMapper; import org.opensearch.index.mapper.Mapper; import org.opensearch.index.mapper.MetadataFieldMapper; import org.opensearch.plugins.MapperPlugin; @@ -52,7 +50,6 @@ public final class MapperRegistry { private final Map mapperParsers; private final Map metadataMapperParsers; - private final Map metadataMapperParsers6x; private final Function> fieldFilter; public MapperRegistry( @@ -62,11 +59,6 @@ public MapperRegistry( ) { this.mapperParsers = Collections.unmodifiableMap(new LinkedHashMap<>(mapperParsers)); this.metadataMapperParsers = Collections.unmodifiableMap(new LinkedHashMap<>(metadataMapperParsers)); - // add the _all field mapper for indices created in 6x - Map metadata6x = new LinkedHashMap<>(); - metadata6x.put(AllFieldMapper.NAME, AllFieldMapper.PARSER); - metadata6x.putAll(metadataMapperParsers); - this.metadataMapperParsers6x = Collections.unmodifiableMap(metadata6x); this.fieldFilter = fieldFilter; } @@ -83,7 
+75,7 @@ public Map getMapperParsers() { * returned map uses the name of the field as a key. */ public Map getMetadataMapperParsers(Version indexCreatedVersion) { - return indexCreatedVersion.onOrAfter(LegacyESVersion.V_7_0_0) ? metadataMapperParsers : metadataMapperParsers6x; + return metadataMapperParsers; } /** diff --git a/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java deleted file mode 100644 index 625cfbb81f8bc..0000000000000 --- a/server/src/test/java/org/opensearch/index/mapper/AllFieldMapperTests.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.index.mapper; - -import org.opensearch.common.Strings; -import org.opensearch.common.compress.CompressedXContent; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentFactory; -import org.opensearch.index.IndexService; -import org.opensearch.index.mapper.MapperService.MergeReason; -import org.opensearch.test.OpenSearchSingleNodeTestCase; - -public class AllFieldMapperTests extends OpenSearchSingleNodeTestCase { - - @Override - protected boolean forbidPrivateIndexSettings() { - return false; - } - - public void testUpdateDefaultSearchAnalyzer() throws Exception { - IndexService indexService = createIndex( - "test", - Settings.builder() - .put("index.analysis.analyzer.default_search.type", "custom") - .put("index.analysis.analyzer.default_search.tokenizer", "standard") - .build() - ); - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("_doc").endObject().endObject()); - indexService.mapperService().merge("_doc", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); - assertEquals(mapping, indexService.mapperService().documentMapper().mapping().toString()); - } - -} diff --git a/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java b/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java index ef78b24be4c08..c2298f60e4a2b 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java @@ -33,7 +33,6 @@ package org.opensearch.indices; import org.opensearch.Version; -import org.opensearch.index.mapper.AllFieldMapper; import org.opensearch.index.mapper.DataStreamFieldMapper; import org.opensearch.index.mapper.FieldNamesFieldMapper; 
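With the 6x-only map removed, the MapperRegistry lookup in the hunk above no longer branches on the index-created version, so every caller sees one uniform set of metadata mappers. A hedged caller-side sketch (the mapperRegistry instance is assumed):

    Map<String, MetadataFieldMapper.TypeParser> parsers = mapperRegistry.getMetadataMapperParsers(Version.CURRENT);
    assert parsers.containsKey("_all") == false; // the 6x noop [_all] entry is no longer registered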
import org.opensearch.index.mapper.IdFieldMapper; @@ -101,19 +100,6 @@ public Map getMetadataMappers() { SeqNoFieldMapper.NAME, FieldNamesFieldMapper.NAME }; - private static String[] EXPECTED_METADATA_FIELDS_6x = new String[] { - AllFieldMapper.NAME, - IgnoredFieldMapper.NAME, - IdFieldMapper.NAME, - RoutingFieldMapper.NAME, - IndexFieldMapper.NAME, - DataStreamFieldMapper.NAME, - SourceFieldMapper.NAME, - TypeFieldMapper.NAME, - VersionFieldMapper.NAME, - SeqNoFieldMapper.NAME, - FieldNamesFieldMapper.NAME }; - public void testBuiltinMappers() { IndicesModule module = new IndicesModule(Collections.emptyList()); { From 3b7e6547572b23eda7bb7313850d4a3cb049a9be Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 20 Apr 2022 17:14:02 -0400 Subject: [PATCH 31/41] Remove JavaVersion, use builtin Runtime.Version to deal with runtime versions (#3006) Signed-off-by: Andriy Redko --- .../org/opensearch/bootstrap/JarHell.java | 17 +- .../org/opensearch/bootstrap/JavaVersion.java | 146 ------------------ .../opensearch/bootstrap/JarHellTests.java | 11 +- .../core/internal/net/NetUtilsTests.java | 5 +- .../common/ssl/SslConfigurationLoader.java | 40 ++--- .../ingest/common/DateFormatTests.java | 9 -- .../org/opensearch/painless/ArrayTests.java | 7 +- .../netty4/SimpleNetty4TransportTests.java | 6 +- .../AzureDiscoveryClusterFormationTests.java | 12 +- .../ingest/attachment/TikaImpl.java | 9 -- ...eCloudStorageBlobStoreRepositoryTests.java | 18 --- ...CloudStorageBlobContainerRetriesTests.java | 17 -- .../repositories/hdfs/HdfsTests.java | 2 - .../nio/SimpleNioTransportTests.java | 6 +- .../aggregations/bucket/DateHistogramIT.java | 10 +- .../search/query/SearchQueryIT.java | 2 - .../org/opensearch/bootstrap/Bootstrap.java | 10 -- .../opensearch/bootstrap/BootstrapChecks.java | 57 ------- .../bootstrap/BootstrapChecksTests.java | 76 --------- .../bootstrap/JavaVersionTests.java | 87 ++++------- .../common/LocalTimeOffsetTests.java | 4 +- .../joda/JavaJodaTimeDuellingTests.java | 22 --- .../common/time/DateFormattersTests.java | 6 - .../common/time/JavaDateMathParserTests.java | 6 - .../index/mapper/DateFieldMapperTests.java | 3 - .../opensearch/monitor/jvm/JvmInfoTests.java | 4 +- .../plugins/IndexStorePluginTests.java | 42 ++--- .../plugins/PluginsServiceTests.java | 2 +- .../test/OpenSearchIntegTestCase.java | 4 +- .../opensearch/test/OpenSearchTestCase.java | 13 +- .../junit/listeners/ReproduceInfoPrinter.java | 3 +- .../opensearch/test/rest/yaml/Features.java | 4 +- 32 files changed, 92 insertions(+), 568 deletions(-) delete mode 100644 libs/core/src/main/java/org/opensearch/bootstrap/JavaVersion.java diff --git a/libs/core/src/main/java/org/opensearch/bootstrap/JarHell.java b/libs/core/src/main/java/org/opensearch/bootstrap/JarHell.java index 843a6b982d7ff..d945697b21c0b 100644 --- a/libs/core/src/main/java/org/opensearch/bootstrap/JarHell.java +++ b/libs/core/src/main/java/org/opensearch/bootstrap/JarHell.java @@ -36,6 +36,7 @@ import org.opensearch.common.io.PathUtils; import java.io.IOException; +import java.lang.Runtime.Version; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; @@ -250,7 +251,9 @@ private static void checkManifest(Manifest manifest, Path jar) { } public static void checkVersionFormat(String targetVersion) { - if (!JavaVersion.isValid(targetVersion)) { + try { + Version.parse(targetVersion); + } catch (final IllegalArgumentException ex) { throw new IllegalStateException( String.format( Locale.ROOT, @@ -267,16 +270,10 @@ public 
static void checkVersionFormat(String targetVersion) { * required by {@code resource} is compatible with the current installation. */ public static void checkJavaVersion(String resource, String targetVersion) { - JavaVersion version = JavaVersion.parse(targetVersion); - if (JavaVersion.current().compareTo(version) < 0) { + Version version = Version.parse(targetVersion); + if (Runtime.version().compareTo(version) < 0) { throw new IllegalStateException( - String.format( - Locale.ROOT, - "%s requires Java %s:, your system: %s", - resource, - targetVersion, - JavaVersion.current().toString() - ) + String.format(Locale.ROOT, "%s requires Java %s:, your system: %s", resource, targetVersion, Runtime.version().toString()) ); } } diff --git a/libs/core/src/main/java/org/opensearch/bootstrap/JavaVersion.java b/libs/core/src/main/java/org/opensearch/bootstrap/JavaVersion.java deleted file mode 100644 index 236563bf8bd89..0000000000000 --- a/libs/core/src/main/java/org/opensearch/bootstrap/JavaVersion.java +++ /dev/null @@ -1,146 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.bootstrap; - -import java.math.BigInteger; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -public class JavaVersion implements Comparable { - - private final List version; - private final String prePart; - - public List getVersion() { - return version; - } - - private JavaVersion(List version, String prePart) { - this.prePart = prePart; - if (version.size() >= 2 && version.get(0) == 1 && version.get(1) == 8) { - // for Java 8 there is ambiguity since both 1.8 and 8 are supported, - // so we rewrite the former to the latter - version = new ArrayList<>(version.subList(1, version.size())); - } - this.version = Collections.unmodifiableList(version); - } - - /** - * Parses the Java version as it can be retrieved as the value of java.version or - * java.specification.version according to JEP 223. 
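The JavaVersion class whose deletion continues below is superseded by the JDK's own java.lang.Runtime.Version (the JEP 223 parser), which the JarHell changes above call directly. A hedged sketch of the replacement usage; the sample version strings come from the updated tests further down:

    Runtime.Version target = Runtime.Version.parse("11.0.14.1+1"); // IllegalArgumentException if malformed
    if (Runtime.version().compareTo(target) < 0) {
        throw new IllegalStateException("requires Java " + target + ", your system: " + Runtime.version());
    }
    // Note the stricter grammar: legacy strings such as "1.7.0_80" no longer parse.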
- * - * @param value The version String - */ - public static JavaVersion parse(String value) { - Objects.requireNonNull(value); - String prePart = null; - if (!isValid(value)) { - throw new IllegalArgumentException("Java version string [" + value + "] could not be parsed."); - } - List version = new ArrayList<>(); - String[] parts = value.split("-"); - String[] numericComponents; - if (parts.length == 1) { - numericComponents = value.split("\\."); - } else if (parts.length == 2) { - numericComponents = parts[0].split("\\."); - prePart = parts[1]; - } else { - throw new IllegalArgumentException("Java version string [" + value + "] could not be parsed."); - } - - for (String component : numericComponents) { - version.add(Integer.valueOf(component)); - } - return new JavaVersion(version, prePart); - } - - public static boolean isValid(String value) { - return value.matches("^0*[0-9]+(\\.[0-9]+)*(-[a-zA-Z0-9]+)?$"); - } - - private static final JavaVersion CURRENT = parse(System.getProperty("java.specification.version")); - - public static JavaVersion current() { - return CURRENT; - } - - @Override - public int compareTo(JavaVersion o) { - int len = Math.max(version.size(), o.version.size()); - for (int i = 0; i < len; i++) { - int d = (i < version.size() ? version.get(i) : 0); - int s = (i < o.version.size() ? o.version.get(i) : 0); - if (s < d) return 1; - if (s > d) return -1; - } - if (prePart != null && o.prePart == null) { - return -1; - } else if (prePart == null && o.prePart != null) { - return 1; - } else if (prePart != null && o.prePart != null) { - return comparePrePart(prePart, o.prePart); - } - return 0; - } - - private int comparePrePart(String prePart, String otherPrePart) { - if (prePart.matches("\\d+")) { - return otherPrePart.matches("\\d+") ? (new BigInteger(prePart)).compareTo(new BigInteger(otherPrePart)) : -1; - } else { - return otherPrePart.matches("\\d+") ? 1 : prePart.compareTo(otherPrePart); - } - } - - @Override - public boolean equals(Object o) { - if (o == null || o.getClass() != getClass()) { - return false; - } - return compareTo((JavaVersion) o) == 0; - } - - @Override - public int hashCode() { - return version.hashCode(); - } - - @Override - public String toString() { - final String versionString = version.stream().map(v -> Integer.toString(v)).collect(Collectors.joining(".")); - return prePart != null ? 
versionString + "-" + prePart : versionString; - } -} diff --git a/libs/core/src/test/java/org/opensearch/bootstrap/JarHellTests.java b/libs/core/src/test/java/org/opensearch/bootstrap/JarHellTests.java index d0e411ae8e3c2..57f5f393ce49f 100644 --- a/libs/core/src/test/java/org/opensearch/bootstrap/JarHellTests.java +++ b/libs/core/src/test/java/org/opensearch/bootstrap/JarHellTests.java @@ -37,6 +37,7 @@ import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; +import java.lang.Runtime.Version; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; @@ -156,12 +157,12 @@ public void testXmlBeansLeniency() throws Exception { public void testRequiredJDKVersionTooOld() throws Exception { Path dir = createTempDir(); - List current = JavaVersion.current().getVersion(); + List current = Runtime.version().version(); List target = new ArrayList<>(current.size()); for (int i = 0; i < current.size(); i++) { target.add(current.get(i) + 1); } - JavaVersion targetVersion = JavaVersion.parse(Strings.collectionToDelimitedString(target, ".")); + Version targetVersion = Version.parse(Strings.collectionToDelimitedString(target, ".")); Manifest manifest = new Manifest(); Attributes attributes = manifest.getMainAttributes(); @@ -173,7 +174,7 @@ public void testRequiredJDKVersionTooOld() throws Exception { fail("did not get expected exception"); } catch (IllegalStateException e) { assertTrue(e.getMessage().contains("requires Java " + targetVersion.toString())); - assertTrue(e.getMessage().contains("your system: " + JavaVersion.current().toString())); + assertTrue(e.getMessage().contains("your system: " + Runtime.version().toString())); } } @@ -209,7 +210,7 @@ public void testRequiredJDKVersionIsOK() throws Exception { } public void testValidVersions() { - String[] versions = new String[] { "1.7", "1.7.0", "0.1.7", "1.7.0.80" }; + String[] versions = new String[] { "12-ea", "13.0.2.3-ea", "14-something", "11.0.2-21002", "11.0.14.1+1", "17.0.2+8" }; for (String version : versions) { try { JarHell.checkVersionFormat(version); @@ -220,7 +221,7 @@ public void testValidVersions() { } public void testInvalidVersions() { - String[] versions = new String[] { "", "1.7.0_80", "1.7." 
}; + String[] versions = new String[] { "", "1.7.0_80", "1.7.", "11.2+something-else" }; for (String version : versions) { try { JarHell.checkVersionFormat(version); diff --git a/libs/core/src/test/java/org/opensearch/core/internal/net/NetUtilsTests.java b/libs/core/src/test/java/org/opensearch/core/internal/net/NetUtilsTests.java index 91236b4f8fb84..448c39731def5 100644 --- a/libs/core/src/test/java/org/opensearch/core/internal/net/NetUtilsTests.java +++ b/libs/core/src/test/java/org/opensearch/core/internal/net/NetUtilsTests.java @@ -33,15 +33,16 @@ package org.opensearch.core.internal.net; import org.apache.lucene.util.Constants; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.core.internal.io.IOUtils; import org.opensearch.test.OpenSearchTestCase; +import java.lang.Runtime.Version; + public class NetUtilsTests extends OpenSearchTestCase { public void testExtendedSocketOptions() { assumeTrue("JDK possibly not supported", Constants.JVM_NAME.contains("HotSpot") || Constants.JVM_NAME.contains("OpenJDK")); - assumeTrue("JDK version not supported", JavaVersion.current().compareTo(JavaVersion.parse("11")) >= 0); + assumeTrue("JDK version not supported", Runtime.version().compareTo(Version.parse("11")) >= 0); assumeTrue("Platform possibly not supported", IOUtils.LINUX || IOUtils.MAC_OS_X); assertNotNull(NetUtils.getTcpKeepIdleSocketOptionOrNull()); assertNotNull(NetUtils.getTcpKeepIntervalSocketOptionOrNull()); diff --git a/libs/ssl-config/src/main/java/org/opensearch/common/ssl/SslConfigurationLoader.java b/libs/ssl-config/src/main/java/org/opensearch/common/ssl/SslConfigurationLoader.java index 6f2670d285e84..2cd9f4f31fc7f 100644 --- a/libs/ssl-config/src/main/java/org/opensearch/common/ssl/SslConfigurationLoader.java +++ b/libs/ssl-config/src/main/java/org/opensearch/common/ssl/SslConfigurationLoader.java @@ -32,8 +32,6 @@ package org.opensearch.common.ssl; -import org.opensearch.bootstrap.JavaVersion; - import javax.crypto.Cipher; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; @@ -361,7 +359,6 @@ private List resolveListSetting(String key, Function parser, L private static List loadDefaultCiphers() { final boolean has256BitAES = has256BitAES(); - final boolean useGCM = JavaVersion.current().compareTo(JavaVersion.parse("11")) >= 0; final boolean tlsV13Supported = DEFAULT_PROTOCOLS.contains("TLSv1.3"); List ciphers = new ArrayList<>(); if (tlsV13Supported) { // TLSv1.3 cipher has PFS, AEAD, hardware support @@ -370,19 +367,18 @@ private static List loadDefaultCiphers() { } ciphers.add("TLS_AES_128_GCM_SHA256"); } - if (useGCM) { // PFS, AEAD, hardware support - if (has256BitAES) { - ciphers.addAll( - Arrays.asList( - "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", - "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", - "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256" - ) - ); - } else { - ciphers.addAll(Arrays.asList("TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256")); - } + // use GCM: PFS, AEAD, hardware support + if (has256BitAES) { + ciphers.addAll( + Arrays.asList( + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256" + ) + ); + } else { + ciphers.addAll(Arrays.asList("TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256")); } // PFS, hardware support @@ -410,13 +406,11 @@ private static List loadDefaultCiphers() { ); } - // AEAD, 
hardware support - if (useGCM) { - if (has256BitAES) { - ciphers.addAll(Arrays.asList("TLS_RSA_WITH_AES_256_GCM_SHA384", "TLS_RSA_WITH_AES_128_GCM_SHA256")); - } else { - ciphers.add("TLS_RSA_WITH_AES_128_GCM_SHA256"); - } + // use GCM: AEAD, hardware support + if (has256BitAES) { + ciphers.addAll(Arrays.asList("TLS_RSA_WITH_AES_256_GCM_SHA384", "TLS_RSA_WITH_AES_128_GCM_SHA256")); + } else { + ciphers.add("TLS_RSA_WITH_AES_128_GCM_SHA256"); } // hardware support diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java index 951b93deb6e8b..04900fe6f7496 100644 --- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java +++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/DateFormatTests.java @@ -32,7 +32,6 @@ package org.opensearch.ingest.common; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateUtils; import org.opensearch.test.OpenSearchTestCase; @@ -96,10 +95,6 @@ public void testParseJavaDefaultYear() { } public void testParseWeekBased() { - assumeFalse( - "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", - JavaVersion.current().equals(JavaVersion.parse("8")) - ); String format = randomFrom("YYYY-ww"); ZoneId timezone = DateUtils.of("Europe/Amsterdam"); Function javaFunction = DateFormat.Java.getFunction(format, timezone, Locale.ROOT); @@ -108,10 +103,6 @@ public void testParseWeekBased() { } public void testParseWeekBasedWithLocale() { - assumeFalse( - "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", - JavaVersion.current().equals(JavaVersion.parse("8")) - ); String format = randomFrom("YYYY-ww"); ZoneId timezone = DateUtils.of("Europe/Amsterdam"); Function javaFunction = DateFormat.Java.getFunction(format, timezone, Locale.US); diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java index 7563ab87fd5e6..0b83a4c558ef6 100644 --- a/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java +++ b/modules/lang-painless/src/test/java/org/opensearch/painless/ArrayTests.java @@ -33,7 +33,6 @@ package org.opensearch.painless; import org.apache.lucene.util.Constants; -import org.opensearch.bootstrap.JavaVersion; import org.hamcrest.Matcher; import java.lang.invoke.MethodHandle; @@ -55,11 +54,7 @@ protected String valueCtorCall(String valueType, int size) { @Override protected Matcher outOfBoundsExceptionMessageMatcher(int index, int size) { - if (JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0) { - return equalTo(Integer.toString(index)); - } else { - return equalTo("Index " + Integer.toString(index) + " out of bounds for length " + Integer.toString(size)); - } + return equalTo("Index " + Integer.toString(index) + " out of bounds for length " + Integer.toString(size)); } public void testArrayLengthHelper() throws Throwable { diff --git a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java index b690ba9e35e35..848819ee5b840 100644 --- 
a/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java +++ b/modules/transport-netty4/src/test/java/org/opensearch/transport/netty4/SimpleNetty4TransportTests.java @@ -34,7 +34,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionListener; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkService; @@ -121,10 +120,7 @@ public void testConnectException() throws UnknownHostException { } public void testDefaultKeepAliveSettings() throws IOException { - assumeTrue( - "setting default keepalive options not supported on this platform", - (IOUtils.LINUX || IOUtils.MAC_OS_X) && JavaVersion.current().compareTo(JavaVersion.parse("11")) >= 0 - ); + assumeTrue("setting default keepalive options not supported on this platform", (IOUtils.LINUX || IOUtils.MAC_OS_X)); try ( MockTransportService serviceC = buildService("TS_C", Version.CURRENT, Settings.EMPTY); MockTransportService serviceD = buildService("TS_D", Version.CURRENT, Settings.EMPTY) diff --git a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index c4f533fd2ee36..570aa98cd9f55 100644 --- a/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/internalClusterTest/java/org/opensearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -38,7 +38,6 @@ import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsServer; import org.apache.logging.log4j.LogManager; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.cloud.azure.classic.management.AzureComputeService; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.io.FileSystemUtils; @@ -67,6 +66,7 @@ import java.io.InputStream; import java.io.OutputStream; import java.io.StringWriter; +import java.lang.Runtime.Version; import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.charset.StandardCharsets; @@ -295,15 +295,13 @@ private static SSLContext getSSLContext() throws Exception { * 12.0.1 so we pin to TLSv1.2 when running on an earlier JDK */ private static String getProtocol() { - if (JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0) { - return "TLS"; - } else if (JavaVersion.current().compareTo(JavaVersion.parse("12")) < 0) { + if (Runtime.version().compareTo(Version.parse("12")) < 0) { return "TLSv1.2"; } else { - JavaVersion full = AccessController.doPrivileged( - (PrivilegedAction) () -> JavaVersion.parse(System.getProperty("java.version")) + Version full = AccessController.doPrivileged( + (PrivilegedAction) () -> Version.parse(System.getProperty("java.version")) ); - if (full.compareTo(JavaVersion.parse("12.0.1")) < 0) { + if (full.compareTo(Version.parse("12.0.1")) < 0) { return "TLSv1.2"; } } diff --git a/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java index 2451eee8e984b..aad490924d311 100644 --- 
a/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java +++ b/plugins/ingest-attachment/src/main/java/org/opensearch/ingest/attachment/TikaImpl.java @@ -42,7 +42,6 @@ import org.opensearch.SpecialPermission; import org.opensearch.bootstrap.FilePermissionUtils; import org.opensearch.bootstrap.JarHell; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.io.PathUtils; @@ -181,14 +180,6 @@ static PermissionCollection getRestrictedPermissions() { perms.add(new RuntimePermission("accessClassInPackage.sun.java2d.cmm.kcms")); // xmlbeans, use by POI, needs to get the context classloader perms.add(new RuntimePermission("getClassLoader")); - // ZipFile needs accessDeclaredMembers on JDK 10; cf. https://bugs.openjdk.java.net/browse/JDK-8187485 - if (JavaVersion.current().compareTo(JavaVersion.parse("10")) >= 0) { - if (JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0) { - // TODO remove this and from plugin-security.policy when JDK 11 is the only one we support - // this is needed pre 11, but it's fixed in 11 : https://bugs.openjdk.java.net/browse/JDK-8187485 - perms.add(new RuntimePermission("accessDeclaredMembers")); - } - } perms.setReadOnly(); return perms; } diff --git a/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 4bc59b6ae6553..274a416d57431 100644 --- a/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/internalClusterTest/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -44,7 +44,6 @@ import org.apache.lucene.util.BytesRefBuilder; import org.opensearch.action.ActionRunnable; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.cluster.metadata.RepositoryMetadata; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.SuppressForbidden; @@ -67,7 +66,6 @@ import org.opensearch.repositories.blobstore.BlobStoreRepository; import org.opensearch.repositories.blobstore.OpenSearchMockAPIBasedRepositoryIntegTestCase; -import org.junit.BeforeClass; import org.threeten.bp.Duration; import java.io.IOException; @@ -88,22 +86,6 @@ @SuppressForbidden(reason = "this test uses a HttpServer to emulate a Google Cloud Storage endpoint") public class GoogleCloudStorageBlobStoreRepositoryTests extends OpenSearchMockAPIBasedRepositoryIntegTestCase { - - public static void assumeNotJava8() { - assumeFalse( - "This test is flaky on jdk8 - we suspect a JDK bug to trigger some assertion in the HttpServer implementation used " - + "to emulate the server side logic of Google Cloud Storage. 
See https://bugs.openjdk.java.net/browse/JDK-8180754, " - + "https://github.com/elastic/elasticsearch/pull/51933 and https://github.com/elastic/elasticsearch/issues/52906 " - + "for more background on this issue.", - JavaVersion.current().equals(JavaVersion.parse("8")) - ); - } - - @BeforeClass - public static void skipJava8() { - assumeNotJava8(); - } - @Override protected String repositoryType() { return GoogleCloudStorageRepository.TYPE; diff --git a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java index 6a589126a9466..616a1ae9feb4f 100644 --- a/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java +++ b/plugins/repository-gcs/src/test/java/org/opensearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -39,7 +39,6 @@ import fixture.gcs.FakeOAuth2HttpHandler; import org.apache.http.HttpStatus; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.Nullable; import org.opensearch.common.Strings; import org.opensearch.common.SuppressForbidden; @@ -62,7 +61,6 @@ import org.opensearch.repositories.blobstore.OpenSearchMockAPIBasedRepositoryIntegTestCase; import org.opensearch.rest.RestStatus; import org.opensearch.rest.RestUtils; -import org.junit.BeforeClass; import org.threeten.bp.Duration; import java.io.IOException; @@ -107,21 +105,6 @@ private String httpServerUrl() { return "http://" + InetAddresses.toUriString(address.getAddress()) + ":" + address.getPort(); } - public static void assumeNotJava8() { - assumeFalse( - "This test is flaky on jdk8 - we suspect a JDK bug to trigger some assertion in the HttpServer implementation used " - + "to emulate the server side logic of Google Cloud Storage. 
See https://bugs.openjdk.java.net/browse/JDK-8180754, " - + "https://github.com/elastic/elasticsearch/pull/51933 and https://github.com/elastic/elasticsearch/issues/52906 " - + "for more background on this issue.", - JavaVersion.current().equals(JavaVersion.parse("8")) - ); - } - - @BeforeClass - public static void skipJava8() { - assumeNotJava8(); - } - @Override protected String downloadStorageEndpoint(String blob) { return "/download/storage/v1/b/bucket/o/" + blob; diff --git a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java index 46d97f41b604f..d46d0b2092d2a 100644 --- a/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java +++ b/plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java @@ -36,7 +36,6 @@ import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterState; import org.opensearch.common.settings.Settings; @@ -63,7 +62,6 @@ protected Collection> getPlugins() { } public void testSimpleWorkflow() { - assumeFalse("https://github.com/elastic/elasticsearch/issues/31498", JavaVersion.current().equals(JavaVersion.parse("11"))); Client client = client(); AcknowledgedResponse putRepositoryResponse = client.admin() diff --git a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java index c8b9fa63383bf..230f89bbafe9f 100644 --- a/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/opensearch/transport/nio/SimpleNioTransportTests.java @@ -34,7 +34,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionListener; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.io.stream.NamedWriteableRegistry; import org.opensearch.common.network.NetworkService; @@ -122,10 +121,7 @@ public void testConnectException() throws UnknownHostException { } public void testDefaultKeepAliveSettings() throws IOException { - assumeTrue( - "setting default keepalive options not supported on this platform", - (IOUtils.LINUX || IOUtils.MAC_OS_X) && JavaVersion.current().compareTo(JavaVersion.parse("11")) >= 0 - ); + assumeTrue("setting default keepalive options not supported on this platform", (IOUtils.LINUX || IOUtils.MAC_OS_X)); try ( MockTransportService serviceC = buildService("TS_C", Version.CURRENT, Settings.EMPTY); MockTransportService serviceD = buildService("TS_D", Version.CURRENT, Settings.EMPTY) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java index 782bcde39ce8d..617c5745c9bba 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/DateHistogramIT.java @@ -35,7 +35,6 @@ import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchPhaseExecutionException; import 
org.opensearch.action.search.SearchResponse; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.time.DateFormatter; @@ -386,9 +385,6 @@ public void testSingleValued_timeZone_epoch() throws Exception { ZonedDateTime expectedKey = keyIterator.next(); String bucketKey = bucket.getKeyAsString(); String expectedBucketName = Long.toString(expectedKey.toInstant().toEpochMilli() / millisDivider); - if (JavaVersion.current().getVersion().get(0) == 8 && bucket.getKeyAsString().endsWith(".0")) { - expectedBucketName = expectedBucketName + ".0"; - } assertThat(bucketKey, equalTo(expectedBucketName)); assertThat(((ZonedDateTime) bucket.getKey()), equalTo(expectedKey)); assertThat(bucket.getDocCount(), equalTo(1L)); @@ -1509,11 +1505,7 @@ public void testRewriteTimeZone_EpochMillisFormat() throws InterruptedException, assertSearchResponse(response); Histogram histo = response.getAggregations().get("histo"); assertThat(histo.getBuckets().size(), equalTo(1)); - if (JavaVersion.current().getVersion().get(0) == 8 && histo.getBuckets().get(0).getKeyAsString().endsWith(".0")) { - assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000.0")); - } else { - assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000")); - } + assertThat(histo.getBuckets().get(0).getKeyAsString(), equalTo("1477954800000")); assertThat(histo.getBuckets().get(0).getDocCount(), equalTo(1L)); response = client().prepareSearch(index) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java index fed5561c1df64..c51043f02174d 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/query/SearchQueryIT.java @@ -44,7 +44,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.search.SearchType; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.document.DocumentField; import org.opensearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause; import org.opensearch.common.regex.Regex; @@ -1860,7 +1859,6 @@ public void testRangeQueryWithTimeZone() throws Exception { * on "Configuring IDEs And Running Tests". 
*/ public void testRangeQueryWithLocaleMapping() throws Exception { - assumeTrue("need java 9 for testing ", JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0); assert ("SPI,COMPAT".equals(System.getProperty("java.locale.providers"))) : "`-Djava.locale.providers=SPI,COMPAT` needs to be set"; assertAcked( diff --git a/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java b/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java index 58ca3cdf78033..c0c0251538d01 100644 --- a/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java +++ b/server/src/main/java/org/opensearch/bootstrap/Bootstrap.java @@ -48,7 +48,6 @@ import org.opensearch.common.PidFile; import org.opensearch.common.SuppressForbidden; import org.opensearch.common.inject.CreationException; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.logging.LogConfigurator; import org.opensearch.common.logging.Loggers; import org.opensearch.common.network.IfConfig; @@ -78,7 +77,6 @@ import java.security.NoSuchAlgorithmException; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -374,14 +372,6 @@ static void init(final boolean foreground, final Path pidFile, final boolean qui } catch (IOException e) { throw new BootstrapException(e); } - if (JavaVersion.current().compareTo(JavaVersion.parse("11")) < 0) { - final String message = String.format( - Locale.ROOT, - "future versions of OpenSearch will require Java 11; " + "your Java version from [%s] does not meet this requirement", - System.getProperty("java.home") - ); - DeprecationLogger.getLogger(Bootstrap.class).deprecate("java_version_11_required", message); - } if (environment.pidFile() != null) { try { PidFile.create(environment.pidFile(), true); diff --git a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java index 79019a73c69e3..6b75f2306431c 100644 --- a/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java +++ b/server/src/main/java/org/opensearch/bootstrap/BootstrapChecks.java @@ -59,8 +59,6 @@ import java.util.List; import java.util.Locale; import java.util.function.Predicate; -import java.util.regex.Matcher; -import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -224,7 +222,6 @@ static List checks() { checks.add(new OnErrorCheck()); checks.add(new OnOutOfMemoryErrorCheck()); checks.add(new EarlyAccessCheck()); - checks.add(new G1GCCheck()); checks.add(new AllPermissionCheck()); checks.add(new DiscoveryConfiguredCheck()); return Collections.unmodifiableList(checks); @@ -683,60 +680,6 @@ String javaVersion() { } - /** - * Bootstrap check for versions of HotSpot that are known to have issues that can lead to index corruption when G1GC is enabled. 
- */ - static class G1GCCheck implements BootstrapCheck { - - @Override - public BootstrapCheckResult check(BootstrapContext context) { - if ("Oracle Corporation".equals(jvmVendor()) && isJava8() && isG1GCEnabled()) { - final String jvmVersion = jvmVersion(); - // HotSpot versions on Java 8 match this regular expression; note that this changes with Java 9 after JEP-223 - final Pattern pattern = Pattern.compile("(\\d+)\\.(\\d+)-b\\d+"); - final Matcher matcher = pattern.matcher(jvmVersion); - final boolean matches = matcher.matches(); - assert matches : jvmVersion; - final int major = Integer.parseInt(matcher.group(1)); - final int update = Integer.parseInt(matcher.group(2)); - // HotSpot versions for Java 8 have major version 25, the bad versions are all versions prior to update 40 - if (major == 25 && update < 40) { - final String message = String.format( - Locale.ROOT, - "JVM version [%s] can cause data corruption when used with G1GC; upgrade to at least Java 8u40", - jvmVersion - ); - return BootstrapCheckResult.failure(message); - } - } - return BootstrapCheckResult.success(); - } - - // visible for testing - String jvmVendor() { - return Constants.JVM_VENDOR; - } - - // visible for testing - boolean isG1GCEnabled() { - assert "Oracle Corporation".equals(jvmVendor()); - return JvmInfo.jvmInfo().useG1GC().equals("true"); - } - - // visible for testing - String jvmVersion() { - assert "Oracle Corporation".equals(jvmVendor()); - return Constants.JVM_VERSION; - } - - // visible for testing - boolean isJava8() { - assert "Oracle Corporation".equals(jvmVendor()); - return JavaVersion.current().equals(JavaVersion.parse("1.8")); - } - - } - static class AllPermissionCheck implements BootstrapCheck { @Override diff --git a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java index c59ca1dd60dc7..88f2047ffaa0f 100644 --- a/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java +++ b/server/src/test/java/org/opensearch/bootstrap/BootstrapChecksTests.java @@ -52,7 +52,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; @@ -654,81 +653,6 @@ String javaVersion() { } - public void testG1GCCheck() throws NodeValidationException { - final AtomicBoolean isG1GCEnabled = new AtomicBoolean(true); - final AtomicBoolean isJava8 = new AtomicBoolean(true); - final AtomicReference jvmVersion = new AtomicReference<>( - String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(0, 39), randomIntBetween(1, 128)) - ); - final BootstrapChecks.G1GCCheck g1GCCheck = new BootstrapChecks.G1GCCheck() { - - @Override - String jvmVendor() { - return "Oracle Corporation"; - } - - @Override - boolean isG1GCEnabled() { - return isG1GCEnabled.get(); - } - - @Override - String jvmVersion() { - return jvmVersion.get(); - } - - @Override - boolean isJava8() { - return isJava8.get(); - } - - }; - - final NodeValidationException e = expectThrows( - NodeValidationException.class, - () -> BootstrapChecks.check(emptyContext, true, Collections.singletonList(g1GCCheck)) - ); - assertThat( - e.getMessage(), - containsString( - "JVM version [" + jvmVersion.get() + "] can cause data corruption when used with G1GC; upgrade to at least Java 8u40" - ) - ); - - // if G1GC is disabled, nothing should happen - isG1GCEnabled.set(false); - 
BootstrapChecks.check(emptyContext, true, Collections.singletonList(g1GCCheck)); - - // if on or after update 40, nothing should happen independent of whether or not G1GC is enabled - isG1GCEnabled.set(randomBoolean()); - jvmVersion.set(String.format(Locale.ROOT, "25.%d-b%d", randomIntBetween(40, 112), randomIntBetween(1, 128))); - BootstrapChecks.check(emptyContext, true, Collections.singletonList(g1GCCheck)); - - final BootstrapChecks.G1GCCheck nonOracleCheck = new BootstrapChecks.G1GCCheck() { - - @Override - String jvmVendor() { - return randomAlphaOfLength(8); - } - - }; - - // if not on an Oracle JVM, nothing should happen - BootstrapChecks.check(emptyContext, true, Collections.singletonList(nonOracleCheck)); - - final BootstrapChecks.G1GCCheck nonJava8Check = new BootstrapChecks.G1GCCheck() { - - @Override - boolean isJava8() { - return false; - } - - }; - - // if not Java 8, nothing should happen - BootstrapChecks.check(emptyContext, true, Collections.singletonList(nonJava8Check)); - } - public void testAllPermissionCheck() throws NodeValidationException { final AtomicBoolean isAllPermissionGranted = new AtomicBoolean(true); final BootstrapChecks.AllPermissionCheck allPermissionCheck = new BootstrapChecks.AllPermissionCheck() { diff --git a/server/src/test/java/org/opensearch/bootstrap/JavaVersionTests.java b/server/src/test/java/org/opensearch/bootstrap/JavaVersionTests.java index 24d78e0986342..b651afd253d11 100644 --- a/server/src/test/java/org/opensearch/bootstrap/JavaVersionTests.java +++ b/server/src/test/java/org/opensearch/bootstrap/JavaVersionTests.java @@ -34,6 +34,7 @@ import org.opensearch.test.OpenSearchTestCase; +import java.lang.Runtime.Version; import java.util.List; import static org.hamcrest.CoreMatchers.equalTo; @@ -41,29 +42,22 @@ public class JavaVersionTests extends OpenSearchTestCase { public void testParse() { - JavaVersion javaVersion = JavaVersion.parse("1.7.0"); - List version = javaVersion.getVersion(); - assertThat(version.size(), is(3)); - assertThat(version.get(0), is(1)); - assertThat(version.get(1), is(7)); - assertThat(version.get(2), is(0)); - - JavaVersion javaVersionEarlyAccess = JavaVersion.parse("14.0.1-ea"); - List version14 = javaVersionEarlyAccess.getVersion(); + Version javaVersionEarlyAccess = Version.parse("14.0.1-ea"); + List version14 = javaVersionEarlyAccess.version(); assertThat(version14.size(), is(3)); assertThat(version14.get(0), is(14)); assertThat(version14.get(1), is(0)); assertThat(version14.get(2), is(1)); - JavaVersion javaVersionOtherPrePart = JavaVersion.parse("13.2.4-somethingElseHere"); - List version13 = javaVersionOtherPrePart.getVersion(); + Version javaVersionOtherPrePart = Version.parse("13.2.4-somethingElseHere"); + List version13 = javaVersionOtherPrePart.version(); assertThat(version13.size(), is(3)); assertThat(version13.get(0), is(13)); assertThat(version13.get(1), is(2)); assertThat(version13.get(2), is(4)); - JavaVersion javaVersionNumericPrePart = JavaVersion.parse("13.2.4-something124443"); - List version11 = javaVersionNumericPrePart.getVersion(); + Version javaVersionNumericPrePart = Version.parse("13.2.4-something124443"); + List version11 = javaVersionNumericPrePart.version(); assertThat(version11.size(), is(3)); assertThat(version11.get(0), is(13)); assertThat(version11.get(1), is(2)); @@ -71,51 +65,36 @@ public void testParse() { } public void testParseInvalidVersions() { - final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> JavaVersion.parse("11.2-something-else")); 
- assertThat(e.getMessage(), equalTo("Java version string [11.2-something-else] could not be parsed.")); - final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> JavaVersion.parse("11.0.")); - assertThat(e1.getMessage(), equalTo("Java version string [11.0.] could not be parsed.")); - final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> JavaVersion.parse("11.a.3")); - assertThat(e2.getMessage(), equalTo("Java version string [11.a.3] could not be parsed.")); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> Version.parse("11.2+something-else")); + assertThat(e.getMessage(), equalTo("Invalid version string: '11.2+something-else'")); + final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> Version.parse("11.0.")); + assertThat(e1.getMessage(), equalTo("Invalid version string: '11.0.'")); + final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> Version.parse("11.a.3")); + assertThat(e2.getMessage(), equalTo("Invalid version string: '11.a.3'")); } public void testToString() { - JavaVersion javaVersion170 = JavaVersion.parse("1.7.0"); - assertThat(javaVersion170.toString(), is("1.7.0")); - JavaVersion javaVersion9 = JavaVersion.parse("9"); + Version javaVersion9 = Version.parse("9"); assertThat(javaVersion9.toString(), is("9")); - JavaVersion javaVersion11 = JavaVersion.parse("11.0.1-something09random"); + Version javaVersion11 = Version.parse("11.0.1-something09random"); assertThat(javaVersion11.toString(), is("11.0.1-something09random")); - JavaVersion javaVersion12 = JavaVersion.parse("12.2-2019"); + Version javaVersion12 = Version.parse("12.2-2019"); assertThat(javaVersion12.toString(), is("12.2-2019")); - JavaVersion javaVersion13ea = JavaVersion.parse("13.1-ea"); + Version javaVersion13ea = Version.parse("13.1-ea"); assertThat(javaVersion13ea.toString(), is("13.1-ea")); } public void testCompare() { - JavaVersion onePointSix = JavaVersion.parse("1.6"); - JavaVersion onePointSeven = JavaVersion.parse("1.7"); - JavaVersion onePointSevenPointZero = JavaVersion.parse("1.7.0"); - JavaVersion onePointSevenPointOne = JavaVersion.parse("1.7.1"); - JavaVersion onePointSevenPointTwo = JavaVersion.parse("1.7.2"); - JavaVersion onePointSevenPointOnePointOne = JavaVersion.parse("1.7.1.1"); - JavaVersion onePointSevenPointTwoPointOne = JavaVersion.parse("1.7.2.1"); - JavaVersion thirteen = JavaVersion.parse("13"); - JavaVersion thirteenPointTwoPointOne = JavaVersion.parse("13.2.1"); - JavaVersion thirteenPointTwoPointOneTwoThousand = JavaVersion.parse("13.2.1-2000"); - JavaVersion thirteenPointTwoPointOneThreeThousand = JavaVersion.parse("13.2.1-3000"); - JavaVersion thirteenPointTwoPointOneA = JavaVersion.parse("13.2.1-aaa"); - JavaVersion thirteenPointTwoPointOneB = JavaVersion.parse("13.2.1-bbb"); - JavaVersion fourteen = JavaVersion.parse("14"); - JavaVersion fourteenPointTwoPointOne = JavaVersion.parse("14.2.1"); - JavaVersion fourteenPointTwoPointOneEarlyAccess = JavaVersion.parse("14.2.1-ea"); - - assertTrue(onePointSix.compareTo(onePointSeven) < 0); - assertTrue(onePointSeven.compareTo(onePointSix) > 0); - assertTrue(onePointSix.compareTo(onePointSix) == 0); - assertTrue(onePointSeven.compareTo(onePointSevenPointZero) == 0); - assertTrue(onePointSevenPointOnePointOne.compareTo(onePointSevenPointOne) > 0); - assertTrue(onePointSevenPointTwo.compareTo(onePointSevenPointTwoPointOne) < 0); + Version thirteen = Version.parse("13"); + 
Version thirteenPointTwoPointOne = Version.parse("13.2.1"); + Version thirteenPointTwoPointOneTwoThousand = Version.parse("13.2.1-2000"); + Version thirteenPointTwoPointOneThreeThousand = Version.parse("13.2.1-3000"); + Version thirteenPointTwoPointOneA = Version.parse("13.2.1-aaa"); + Version thirteenPointTwoPointOneB = Version.parse("13.2.1-bbb"); + Version fourteen = Version.parse("14"); + Version fourteenPointTwoPointOne = Version.parse("14.2.1"); + Version fourteenPointTwoPointOneEarlyAccess = Version.parse("14.2.1-ea"); + assertTrue(thirteen.compareTo(thirteenPointTwoPointOne) < 0); assertTrue(thirteen.compareTo(fourteen) < 0); assertTrue(thirteenPointTwoPointOneThreeThousand.compareTo(thirteenPointTwoPointOneTwoThousand) > 0); @@ -129,20 +108,16 @@ public void testCompare() { } public void testValidVersions() { - String[] versions = new String[] { "1.7", "1.7.0", "0.1.7", "1.7.0.80", "12-ea", "13.0.2.3-ea", "14-something", "11.0.2-21002" }; + String[] versions = new String[] { "12-ea", "13.0.2.3-ea", "14-something", "11.0.2-21002", "11.0.14.1+1", "17.0.2+8" }; for (String version : versions) { - assertTrue(JavaVersion.isValid(version)); + assertNotNull(Version.parse(version)); } } public void testInvalidVersions() { - String[] versions = new String[] { "", "1.7.0_80", "1.7.", "11.2-something-else" }; + String[] versions = new String[] { "", "1.7.0_80", "1.7.", "11.2+something-else" }; for (String version : versions) { - assertFalse(JavaVersion.isValid(version)); + assertThrows(IllegalArgumentException.class, () -> Version.parse(version)); } } - - public void testJava8Compat() { - assertEquals(JavaVersion.parse("1.8"), JavaVersion.parse("8")); - } } diff --git a/server/src/test/java/org/opensearch/common/LocalTimeOffsetTests.java b/server/src/test/java/org/opensearch/common/LocalTimeOffsetTests.java index 12810241e3904..b032e27397f2d 100644 --- a/server/src/test/java/org/opensearch/common/LocalTimeOffsetTests.java +++ b/server/src/test/java/org/opensearch/common/LocalTimeOffsetTests.java @@ -32,12 +32,12 @@ package org.opensearch.common; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.LocalTimeOffset.Gap; import org.opensearch.common.LocalTimeOffset.Overlap; import org.opensearch.common.time.DateFormatter; import org.opensearch.test.OpenSearchTestCase; +import java.lang.Runtime.Version; import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; @@ -278,7 +278,7 @@ public void testKnownMovesBackToPreviousDay() { assertKnownMovesBacktoPreviousDay("America/Moncton", "2005-10-29T03:01:00"); assertKnownMovesBacktoPreviousDay("America/St_Johns", "2010-11-07T02:31:00"); assertKnownMovesBacktoPreviousDay("Canada/Newfoundland", "2010-11-07T02:31:00"); - if (JavaVersion.current().compareTo(JavaVersion.parse("11")) > 0) { + if (Runtime.version().compareTo(Version.parse("11")) > 0) { // Added in java 12 assertKnownMovesBacktoPreviousDay("Pacific/Guam", "1969-01-25T13:01:00"); assertKnownMovesBacktoPreviousDay("Pacific/Saipan", "1969-01-25T13:01:00"); diff --git a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java index 30fcf4bb32989..94ddfd7e7f100 100644 --- a/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java +++ b/server/src/test/java/org/opensearch/common/joda/JavaJodaTimeDuellingTests.java @@ -33,7 +33,6 @@ package org.opensearch.common.joda; import org.opensearch.OpenSearchParseException; -import 
org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.time.DateFormatters; import org.opensearch.common.time.DateMathParser; @@ -43,7 +42,6 @@ import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.ISODateTimeFormat; -import org.junit.BeforeClass; import java.time.ZoneId; import java.time.ZoneOffset; @@ -62,18 +60,6 @@ protected boolean enableWarningsCheck() { return false; } - @BeforeClass - public static void checkJvmProperties() { - boolean runtimeJdk8 = JavaVersion.current().getVersion().get(0) == 8; - assert (runtimeJdk8 && ("SPI,JRE".equals(System.getProperty("java.locale.providers")))) - || (false == runtimeJdk8 && ("SPI,COMPAT".equals(System.getProperty("java.locale.providers")))) - : "`-Djava.locale.providers` needs to be set"; - assumeFalse( - "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", - runtimeJdk8 - ); - } - public void testTimezoneParsing() { /** this testcase won't work in joda. See comment in {@link #testPartialTimeParsing()} * assertSameDateAs("2016-11-30T+01", "strict_date_optional_time", "strict_date_optional_time"); @@ -906,14 +892,6 @@ private void assertSamePrinterOutput( String jodaTimeOut = jodaDateFormatter.formatJoda(jodaDate); assertThat(jodaDate.getMillis(), is(javaDate.toInstant().toEpochMilli())); - - if (JavaVersion.current().getVersion().get(0) == 8 - && javaTimeOut.endsWith(".0") - && (format.equals("epoch_second") || format.equals("epoch_millis"))) { - // java 8 has a bug in DateTimeFormatter usage when printing dates that rely on isSupportedBy for fields, which is - // what we use for epoch time. This change accounts for that bug. 
It should be removed when java 8 support is removed - jodaTimeOut += ".0"; - } String message = String.format( Locale.ROOT, "expected string representation to be equal for format [%s]: joda [%s], java [%s]", diff --git a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java index 1e57f9fe88d9c..681daf1755890 100644 --- a/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java +++ b/server/src/test/java/org/opensearch/common/time/DateFormattersTests.java @@ -32,7 +32,6 @@ package org.opensearch.common.time; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.joda.Joda; import org.opensearch.test.OpenSearchTestCase; @@ -56,11 +55,6 @@ public class DateFormattersTests extends OpenSearchTestCase { public void testWeekBasedDates() { - assumeFalse( - "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", - JavaVersion.current().equals(JavaVersion.parse("8")) - ); - // as per WeekFields.ISO first week starts on Monday and has minimum 4 days DateFormatter dateFormatter = DateFormatters.forPattern("YYYY-ww"); diff --git a/server/src/test/java/org/opensearch/common/time/JavaDateMathParserTests.java b/server/src/test/java/org/opensearch/common/time/JavaDateMathParserTests.java index a26a3a298b360..504741f56efed 100644 --- a/server/src/test/java/org/opensearch/common/time/JavaDateMathParserTests.java +++ b/server/src/test/java/org/opensearch/common/time/JavaDateMathParserTests.java @@ -33,7 +33,6 @@ package org.opensearch.common.time; import org.opensearch.OpenSearchParseException; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.test.OpenSearchTestCase; import java.time.Instant; @@ -111,11 +110,6 @@ public void testOverridingLocaleOrZoneAndCompositeRoundUpParser() { } public void testWeekDates() { - assumeFalse( - "won't work in jdk8 " + "because SPI mechanism is not looking at classpath - needs ISOCalendarDataProvider in jre's ext/libs", - JavaVersion.current().equals(JavaVersion.parse("8")) - ); - DateFormatter formatter = DateFormatter.forPattern("YYYY-ww"); assertDateMathEquals(formatter.toDateMathParser(), "2016-01", "2016-01-04T23:59:59.999Z", 0, true, ZoneOffset.UTC); diff --git a/server/src/test/java/org/opensearch/index/mapper/DateFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/DateFieldMapperTests.java index cdc5c9567e581..918b86761fe86 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DateFieldMapperTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DateFieldMapperTests.java @@ -34,7 +34,6 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.collect.List; import org.opensearch.common.time.DateFormatter; import org.opensearch.common.xcontent.XContentBuilder; @@ -186,8 +185,6 @@ public void testChangeFormat() throws IOException { } public void testChangeLocale() throws IOException { - assumeTrue("need java 9 for testing ", JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0); - DocumentMapper mapper = createDocumentMapper( fieldMapping(b -> b.field("type", "date").field("format", "E, d MMM yyyy HH:mm:ss Z").field("locale", "de")) ); diff --git a/server/src/test/java/org/opensearch/monitor/jvm/JvmInfoTests.java b/server/src/test/java/org/opensearch/monitor/jvm/JvmInfoTests.java index 
d86971adcc992..3d02a4797497e 100644 --- a/server/src/test/java/org/opensearch/monitor/jvm/JvmInfoTests.java +++ b/server/src/test/java/org/opensearch/monitor/jvm/JvmInfoTests.java @@ -33,7 +33,6 @@ package org.opensearch.monitor.jvm; import org.apache.lucene.util.Constants; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.test.OpenSearchTestCase; public class JvmInfoTests extends OpenSearchTestCase { @@ -53,14 +52,13 @@ private boolean isG1GCEnabled() { final String argline = System.getProperty("tests.jvm.argline"); final boolean g1GCEnabled = flagIsEnabled(argline, "UseG1GC"); // for JDK 9 the default collector when no collector is specified is G1 GC - final boolean versionIsAtLeastJava9 = JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0; final boolean noOtherCollectorSpecified = argline == null || (!flagIsEnabled(argline, "UseParNewGC") && !flagIsEnabled(argline, "UseParallelGC") && !flagIsEnabled(argline, "UseParallelOldGC") && !flagIsEnabled(argline, "UseSerialGC") && !flagIsEnabled(argline, "UseConcMarkSweepGC")); - return g1GCEnabled || (versionIsAtLeastJava9 && noOtherCollectorSpecified); + return g1GCEnabled || noOtherCollectorSpecified; } private boolean flagIsEnabled(String argline, String flag) { diff --git a/server/src/test/java/org/opensearch/plugins/IndexStorePluginTests.java b/server/src/test/java/org/opensearch/plugins/IndexStorePluginTests.java index 165e3aaf3f171..5fd76fc90568c 100644 --- a/server/src/test/java/org/opensearch/plugins/IndexStorePluginTests.java +++ b/server/src/test/java/org/opensearch/plugins/IndexStorePluginTests.java @@ -32,7 +32,6 @@ package org.opensearch.plugins; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.routing.ShardRouting; import org.opensearch.common.settings.Settings; @@ -134,25 +133,16 @@ public void testDuplicateIndexStoreFactories() { IllegalStateException.class, () -> new MockNode(settings, Arrays.asList(BarStorePlugin.class, FooStorePlugin.class)) ); - if (JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0) { - assertThat( - e, - hasToString( - matches( - "java.lang.IllegalStateException: Duplicate key store \\(attempted merging values " - + "org.opensearch.index.store.FsDirectoryFactory@[\\w\\d]+ " - + "and org.opensearch.index.store.FsDirectoryFactory@[\\w\\d]+\\)" - ) - ) - ); - } else { - assertThat( - e, - hasToString( - matches("java.lang.IllegalStateException: Duplicate key org.opensearch.index.store.FsDirectoryFactory@[\\w\\d]+") + assertThat( + e, + hasToString( + matches( + "java.lang.IllegalStateException: Duplicate key store \\(attempted merging values " + + "org.opensearch.index.store.FsDirectoryFactory@[\\w\\d]+ " + + "and org.opensearch.index.store.FsDirectoryFactory@[\\w\\d]+\\)" ) - ); - } + ) + ); } public void testDuplicateIndexStoreRecoveryStateFactories() { @@ -161,18 +151,6 @@ public void testDuplicateIndexStoreRecoveryStateFactories() { IllegalStateException.class, () -> new MockNode(settings, Arrays.asList(FooCustomRecoveryStore.class, BarCustomRecoveryStore.class)) ); - if (JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0) { - assertThat(e.getMessage(), containsString("Duplicate key recovery-type")); - } else { - assertThat( - e, - hasToString( - matches( - "java.lang.IllegalStateException: Duplicate key " - + "org.opensearch.plugins.IndexStorePluginTests\\$RecoveryFactory@[\\w\\d]+" - ) - ) - ); - } + assertThat(e.getMessage(), containsString("Duplicate key recovery-type")); } } 
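The hunks in this patch all serve one migration: the custom org.opensearch.bootstrap.JavaVersion parser is replaced by the JDK's built-in java.lang.Runtime.Version, which implements the JEP 223 version scheme and is available on every JDK that OpenSearch still supports. A minimal sketch of the replacement API, for illustration only (the class name RuntimeVersionExample is invented; each call shown is one the updated tests exercise):

    import java.lang.Runtime.Version;

    public class RuntimeVersionExample {
        public static void main(String[] args) {
            // Version of the running JVM; no parsing of java.version needed.
            Version current = Runtime.version();
            System.out.println("feature release: " + current.feature());
            System.out.println("numeric components: " + current.version());

            // Version.parse accepts dotted-integer version numbers with optional
            // pre-release ("-ea") and build ("+8") suffixes ...
            Version ga = Version.parse("14.0.1");
            Version ea = Version.parse("14.0.1-ea");
            System.out.println(Version.parse("11").compareTo(ga) < 0); // true
            System.out.println(ea.compareTo(ga) < 0); // true: a pre-release sorts before its GA

            // ... and rejects pre-JEP-223 strings such as "1.7.0_80" with an
            // IllegalArgumentException, which is exactly what the rewritten
            // JavaVersionTests#testInvalidVersions asserts.
        }
    }

One behavioral difference to keep in mind: JavaVersion treated "1.8" and "8" as the same release, while Runtime.Version parses them as the distinct version numbers [1, 8] and [8]. That legacy aliasing no longer exists, which is why the testJava8Compat case is deleted above rather than migrated.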
diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index d22776cf01f0e..de8adf3539fe6 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -723,7 +723,7 @@ public void testIncompatibleJavaVersion() throws Exception { "desc", "1.0", Version.CURRENT, - "1000000.0", + "1000000", "FakePlugin", Collections.emptyList(), false diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 1c09fb2ff8c04..8a3a5bcb5bb50 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -70,7 +70,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.DefaultShardOperationFailedException; import org.opensearch.action.support.IndicesOptions; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.client.AdminClient; import org.opensearch.client.Client; import org.opensearch.client.ClusterAdminClient; @@ -166,6 +165,7 @@ import org.junit.BeforeClass; import java.io.IOException; +import java.lang.Runtime.Version; import java.lang.annotation.Annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; @@ -2403,7 +2403,7 @@ protected boolean willSufferDebian8MemoryProblem() { final boolean anyDebian8Nodes = response.getNodes() .stream() .anyMatch(ni -> ni.getInfo(OsInfo.class).getPrettyName().equals("Debian GNU/Linux 8 (jessie)")); - boolean java15Plus = JavaVersion.current().compareTo(JavaVersion.parse("15")) >= 0; + boolean java15Plus = Runtime.version().compareTo(Version.parse("15")) >= 0; return anyDebian8Nodes && java15Plus == false; } } diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java index 96698036fca55..ecf0de521f36a 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchTestCase.java @@ -62,7 +62,6 @@ import org.apache.lucene.tests.util.TimeUnits; import org.opensearch.Version; import org.opensearch.bootstrap.BootstrapForTesting; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.client.Requests; import org.opensearch.cluster.ClusterModule; import org.opensearch.cluster.metadata.IndexMetadata; @@ -967,17 +966,7 @@ public static TimeZone randomTimeZone() { * generate a random TimeZone from the ones available in java.time */ public static ZoneId randomZone() { - // work around a JDK bug, where java 8 cannot parse the timezone GMT0 back into a temporal accessor - // see https://bugs.openjdk.java.net/browse/JDK-8138664 - if (JavaVersion.current().getVersion().get(0) == 8) { - ZoneId timeZone; - do { - timeZone = ZoneId.of(randomJodaAndJavaSupportedTimezone(JAVA_ZONE_IDS)); - } while (timeZone.equals(ZoneId.of("GMT0"))); - return timeZone; - } else { - return ZoneId.of(randomJodaAndJavaSupportedTimezone(JAVA_ZONE_IDS)); - } + return ZoneId.of(randomJodaAndJavaSupportedTimezone(JAVA_ZONE_IDS)); } /** diff --git a/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java 
b/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java index 3d5a906e50836..b2d0705b937e2 100644 --- a/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/opensearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -35,7 +35,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.Constants; -import org.opensearch.bootstrap.JavaVersion; import org.opensearch.common.Strings; import org.opensearch.common.SuppressForbidden; import org.opensearch.test.OpenSearchIntegTestCase; @@ -192,7 +191,7 @@ private ReproduceErrorMessageBuilder appendESProperties() { } appendOpt("tests.locale", Locale.getDefault().toLanguageTag()); appendOpt("tests.timezone", TimeZone.getDefault().getID()); - appendOpt("runtime.java", Integer.toString(JavaVersion.current().getVersion().get(0))); + appendOpt("runtime.java", Integer.toString(Runtime.version().version().get(0))); appendOpt(OpenSearchTestCase.FIPS_SYSPROP, System.getProperty(OpenSearchTestCase.FIPS_SYSPROP)); return this; } diff --git a/test/framework/src/main/java/org/opensearch/test/rest/yaml/Features.java b/test/framework/src/main/java/org/opensearch/test/rest/yaml/Features.java index 6e59e35dcb0b4..10fb1e52259a9 100644 --- a/test/framework/src/main/java/org/opensearch/test/rest/yaml/Features.java +++ b/test/framework/src/main/java/org/opensearch/test/rest/yaml/Features.java @@ -32,8 +32,6 @@ package org.opensearch.test.rest.yaml; -import org.opensearch.bootstrap.JavaVersion; - import java.util.Arrays; import java.util.List; @@ -86,7 +84,7 @@ public static boolean areAllSupported(List features) { } private static boolean isSupported(String feature) { - if (feature.equals(SPI_ON_CLASSPATH_SINCE_JDK_9) && JavaVersion.current().compareTo(JavaVersion.parse("9")) >= 0) { + if (feature.equals(SPI_ON_CLASSPATH_SINCE_JDK_9)) { return true; } return SUPPORTED.contains(feature); From c7c410a06311ea4b1ad61ca1a214ec89c898335a Mon Sep 17 00:00:00 2001 From: Kartik Ganesh Date: Wed, 20 Apr 2022 15:02:25 -0700 Subject: [PATCH 32/41] Refactoring GatedAutoCloseable and moving RecoveryState.Timer (#2965) * Refactoring GatedAutoCloseable to AutoCloseableRefCounted This is a part of the process of merging our feature branch - feature/segment-replication - back into main by re-PRing our changes from the feature branch. GatedAutoCloseable currently wraps a subclass of RefCounted. Segment replication adds another subclass, but this also wraps RefCounted. Both subclasses have the same shutdown hook - decRef. This change makes the superclass less generic to increase code convergence. The breakdown of the plan to merge segment-replication to main is detailed in #2355 Segment replication design proposal - #2229 Signed-off-by: Kartik Ganesh * Minor refactoring in RecoveryState This change makes two minor updates to RecoveryState - 1. The readRecoveryState API is removed because it can be replaced by an invocation of the constructor 2. The class members of the Timer inner class are changed to private, and accesses are only through the public APIs Signed-off-by: Kartik Ganesh * Update RecoveryTargetTests to test Timer subclasses deterministically This change removes the use of RandomBoolean in testing the Timer classes and creates a dedicated unit test for each. The common test logic is shared via a private method. 
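To make the headline refactor concrete, this is the call pattern the new adapter is meant to support; an illustrative sketch only, shaped like the body of PeerRecoveryTargetService#doRecovery in the diff below, where the real classes are defined:

    // RecoveryRef extends AutoCloseableRefCounted<RecoveryTarget>. Per its javadoc,
    // tryIncRef() must already have succeeded on the target before it is wrapped,
    // so a non-null ref is always safe to use inside the block.
    try (RecoveryRef recoveryRef = onGoingRecoveries.getRecovery(recoveryId)) {
        if (recoveryRef == null) {
            return; // no such recovery - probably already finished
        }
        RecoveryTarget target = recoveryRef.get();
        // ... use the target while the reference count is guaranteed to be held ...
    }
    // close() flips the internal OneWayGate and calls decRef() exactly once;
    // a second close() is a harmless no-op, as the renamed unit test verifies.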
Signed-off-by: Kartik Ganesh * Move the RecoveryState.Timer class to a top-level class This will eventually be reused across both replication use-cases - peer recovery and segment replication. Signed-off-by: Kartik Ganesh * Further update of timer tests in RecoveryTargetTests Removes a non-deterministic code path around stopping the timer, and avoids assertThat (deprecated) Signed-off-by: Kartik Ganesh * Rename to ReplicationTimer Signed-off-by: Kartik Ganesh * Remove RecoveryTargetTests assert on a running timer Trying to serialize and deserialize a running Timer instance, and then checking for equality leads to flaky test failures when the ser/deser takes time. Signed-off-by: Kartik Ganesh --- .../recovery/TransportRecoveryAction.java | 2 +- ...able.java => AutoCloseableRefCounted.java} | 15 +- .../common/concurrent/GatedCloseable.java | 2 +- .../recovery/PeerRecoveryTargetService.java | 7 +- .../recovery/RecoveriesCollection.java | 6 +- .../indices/recovery/RecoveryState.java | 96 ++----------- .../replication/common/ReplicationTimer.java | 97 +++++++++++++ ...java => AutoCloseableRefCountedTests.java} | 21 +-- .../indices/recovery/RecoveryTargetTests.java | 129 +++++++++--------- .../action/cat/RestRecoveryActionTests.java | 3 +- 10 files changed, 206 insertions(+), 172 deletions(-) rename server/src/main/java/org/opensearch/common/concurrent/{GatedAutoCloseable.java => AutoCloseableRefCounted.java} (57%) create mode 100644 server/src/main/java/org/opensearch/indices/replication/common/ReplicationTimer.java rename server/src/test/java/org/opensearch/common/concurrent/{GatedAutoCloseableTests.java => AutoCloseableRefCountedTests.java} (50%) diff --git a/server/src/main/java/org/opensearch/action/admin/indices/recovery/TransportRecoveryAction.java b/server/src/main/java/org/opensearch/action/admin/indices/recovery/TransportRecoveryAction.java index dd5ae31c01e56..7c3666e44f093 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/recovery/TransportRecoveryAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/recovery/TransportRecoveryAction.java @@ -87,7 +87,7 @@ public TransportRecoveryAction( @Override protected RecoveryState readShardResult(StreamInput in) throws IOException { - return RecoveryState.readRecoveryState(in); + return new RecoveryState(in); } @Override diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/AutoCloseableRefCounted.java similarity index 57% rename from server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java rename to server/src/main/java/org/opensearch/common/concurrent/AutoCloseableRefCounted.java index cb819c0320e91..795d352542881 100644 --- a/server/src/main/java/org/opensearch/common/concurrent/GatedAutoCloseable.java +++ b/server/src/main/java/org/opensearch/common/concurrent/AutoCloseableRefCounted.java @@ -13,20 +13,19 @@ package org.opensearch.common.concurrent; +import org.opensearch.common.util.concurrent.RefCounted; + /** - * Decorator class that wraps an object reference with a {@link Runnable} that is - * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures - * that this is invoked only once. See also {@link GatedCloseable} + * Adapter class that enables a {@link RefCounted} implementation to function like an {@link AutoCloseable}. + * The {@link #close()} API invokes {@link RefCounted#decRef()} and ensures idempotency using a {@link OneWayGate}. 
*/ -public class GatedAutoCloseable implements AutoCloseable { +public class AutoCloseableRefCounted implements AutoCloseable { private final T ref; - private final Runnable onClose; private final OneWayGate gate; - public GatedAutoCloseable(T ref, Runnable onClose) { + public AutoCloseableRefCounted(T ref) { this.ref = ref; - this.onClose = onClose; gate = new OneWayGate(); } @@ -37,7 +36,7 @@ public T get() { @Override public void close() { if (gate.close()) { - onClose.run(); + ref.decRef(); } } } diff --git a/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java index d98e4cca8d561..467b5e4cfb3ea 100644 --- a/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java +++ b/server/src/main/java/org/opensearch/common/concurrent/GatedCloseable.java @@ -21,7 +21,7 @@ /** * Decorator class that wraps an object reference with a {@link CheckedRunnable} that is * invoked when {@link #close()} is called. The internal {@link OneWayGate} instance ensures - * that this is invoked only once. See also {@link GatedAutoCloseable} + * that this is invoked only once. See also {@link AutoCloseableRefCounted} */ public class GatedCloseable implements Closeable { diff --git a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java index d7c3421b1de93..9348988f8edcc 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java +++ b/server/src/main/java/org/opensearch/indices/recovery/PeerRecoveryTargetService.java @@ -70,6 +70,7 @@ import org.opensearch.index.translog.Translog; import org.opensearch.index.translog.TranslogCorruptedException; import org.opensearch.indices.recovery.RecoveriesCollection.RecoveryRef; +import org.opensearch.indices.replication.common.ReplicationTimer; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.ConnectTransportException; @@ -215,7 +216,7 @@ private void doRecovery(final long recoveryId, final StartRecoveryRequest preExi final String actionName; final TransportRequest requestToSend; final StartRecoveryRequest startRequest; - final RecoveryState.Timer timer; + final ReplicationTimer timer; try (RecoveryRef recoveryRef = onGoingRecoveries.getRecovery(recoveryId)) { if (recoveryRef == null) { logger.trace("not running recovery with id [{}] - can not find it (probably finished)", recoveryId); @@ -622,9 +623,9 @@ private class RecoveryResponseHandler implements TransportResponseHandler { + public static class RecoveryRef extends AutoCloseableRefCounted { /** * Important: {@link RecoveryTarget#tryIncRef()} should * be *successfully* called on status before */ public RecoveryRef(RecoveryTarget status) { - super(status, status::decRef); + super(status); status.setLastAccessTime(); } } diff --git a/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java b/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java index d89d59e2f2c1b..9f57a0ebd4d0f 100644 --- a/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java +++ b/server/src/main/java/org/opensearch/indices/recovery/RecoveryState.java @@ -50,6 +50,7 @@ import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.ShardId; import org.opensearch.index.store.StoreStats; +import org.opensearch.indices.replication.common.ReplicationTimer; import 
java.io.IOException; import java.util.ArrayList; @@ -122,7 +123,7 @@ public static Stage fromId(byte id) { private final Index index; private final Translog translog; private final VerifyIndex verifyIndex; - private final Timer timer; + private final ReplicationTimer timer; private RecoverySource recoverySource; private ShardId shardId; @@ -149,12 +150,12 @@ public RecoveryState(ShardRouting shardRouting, DiscoveryNode targetNode, @Nulla this.index = index; translog = new Translog(); verifyIndex = new VerifyIndex(); - timer = new Timer(); + timer = new ReplicationTimer(); timer.start(); } public RecoveryState(StreamInput in) throws IOException { - timer = new Timer(in); + timer = new ReplicationTimer(in); stage = Stage.fromId(in.readByte()); shardId = new ShardId(in); recoverySource = RecoverySource.readFrom(in); @@ -256,7 +257,7 @@ public Translog getTranslog() { return translog; } - public Timer getTimer() { + public ReplicationTimer getTimer() { return timer; } @@ -280,10 +281,6 @@ public boolean getPrimary() { return primary; } - public static RecoveryState readRecoveryState(StreamInput in) throws IOException { - return new RecoveryState(in); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { @@ -291,9 +288,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(Fields.TYPE, recoverySource.getType()); builder.field(Fields.STAGE, stage.toString()); builder.field(Fields.PRIMARY, primary); - builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime); - if (timer.stopTime > 0) { - builder.timeField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime); + builder.timeField(Fields.START_TIME_IN_MILLIS, Fields.START_TIME, timer.startTime()); + if (timer.stopTime() > 0) { + builder.timeField(Fields.STOP_TIME_IN_MILLIS, Fields.STOP_TIME, timer.stopTime()); } builder.humanReadableField(Fields.TOTAL_TIME_IN_MILLIS, Fields.TOTAL_TIME, new TimeValue(timer.time())); @@ -375,78 +372,7 @@ static final class Fields { static final String TARGET_THROTTLE_TIME_IN_MILLIS = "target_throttle_time_in_millis"; } - public static class Timer implements Writeable { - protected long startTime = 0; - protected long startNanoTime = 0; - protected long time = -1; - protected long stopTime = 0; - - public Timer() {} - - public Timer(StreamInput in) throws IOException { - startTime = in.readVLong(); - startNanoTime = in.readVLong(); - stopTime = in.readVLong(); - time = in.readVLong(); - } - - @Override - public synchronized void writeTo(StreamOutput out) throws IOException { - out.writeVLong(startTime); - out.writeVLong(startNanoTime); - out.writeVLong(stopTime); - // write a snapshot of current time, which is not per se the time field - out.writeVLong(time()); - } - - public synchronized void start() { - assert startTime == 0 : "already started"; - startTime = System.currentTimeMillis(); - startNanoTime = System.nanoTime(); - } - - /** Returns start time in millis */ - public synchronized long startTime() { - return startTime; - } - - /** Returns elapsed time in millis, or 0 if timer was not started */ - public synchronized long time() { - if (startNanoTime == 0) { - return 0; - } - if (time >= 0) { - return time; - } - return Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startNanoTime)); - } - - /** Returns stop time in millis */ - public synchronized long stopTime() { - return stopTime; - } - - public synchronized void stop() { - assert stopTime == 0 : "already stopped"; - 
stopTime = Math.max(System.currentTimeMillis(), startTime); - time = TimeValue.nsecToMSec(System.nanoTime() - startNanoTime); - assert time >= 0; - } - - public synchronized void reset() { - startTime = 0; - startNanoTime = 0; - time = -1; - stopTime = 0; - } - - // for tests - public long getStartNanoTime() { - return startNanoTime; - } - } - - public static class VerifyIndex extends Timer implements ToXContentFragment, Writeable { + public static class VerifyIndex extends ReplicationTimer implements ToXContentFragment, Writeable { private volatile long checkIndexTime; public VerifyIndex() {} @@ -483,7 +409,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } - public static class Translog extends Timer implements ToXContentFragment, Writeable { + public static class Translog extends ReplicationTimer implements ToXContentFragment, Writeable { public static final int UNKNOWN = -1; private int recovered; @@ -819,7 +745,7 @@ public boolean isComplete() { } } - public static class Index extends Timer implements ToXContentFragment, Writeable { + public static class Index extends ReplicationTimer implements ToXContentFragment, Writeable { private final RecoveryFilesDetails fileDetails; public static final long UNKNOWN = -1L; diff --git a/server/src/main/java/org/opensearch/indices/replication/common/ReplicationTimer.java b/server/src/main/java/org/opensearch/indices/replication/common/ReplicationTimer.java new file mode 100644 index 0000000000000..976df28265d9a --- /dev/null +++ b/server/src/main/java/org/opensearch/indices/replication/common/ReplicationTimer.java @@ -0,0 +1,97 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.indices.replication.common; + +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.io.stream.Writeable; +import org.opensearch.common.unit.TimeValue; + +import java.io.IOException; + +/** + * A serializable timer that is used to measure the time taken for + * file replication operations like recovery. 
+ */ +public class ReplicationTimer implements Writeable { + private long startTime = 0; + private long startNanoTime = 0; + private long time = -1; + private long stopTime = 0; + + public ReplicationTimer() {} + + public ReplicationTimer(StreamInput in) throws IOException { + startTime = in.readVLong(); + startNanoTime = in.readVLong(); + stopTime = in.readVLong(); + time = in.readVLong(); + } + + @Override + public synchronized void writeTo(StreamOutput out) throws IOException { + out.writeVLong(startTime); + out.writeVLong(startNanoTime); + out.writeVLong(stopTime); + // write a snapshot of current time, which is not per se the time field + out.writeVLong(time()); + } + + public synchronized void start() { + assert startTime == 0 : "already started"; + startTime = System.currentTimeMillis(); + startNanoTime = System.nanoTime(); + } + + /** + * Returns start time in millis + */ + public synchronized long startTime() { + return startTime; + } + + /** + * Returns elapsed time in millis, or 0 if timer was not started + */ + public synchronized long time() { + if (startNanoTime == 0) { + return 0; + } + if (time >= 0) { + return time; + } + return Math.max(0, TimeValue.nsecToMSec(System.nanoTime() - startNanoTime)); + } + + /** + * Returns stop time in millis + */ + public synchronized long stopTime() { + return stopTime; + } + + public synchronized void stop() { + assert stopTime == 0 : "already stopped"; + stopTime = Math.max(System.currentTimeMillis(), startTime); + time = TimeValue.nsecToMSec(System.nanoTime() - startNanoTime); + assert time >= 0; + } + + public synchronized void reset() { + startTime = 0; + startNanoTime = 0; + time = -1; + stopTime = 0; + } + + // only used in tests + public long getStartNanoTime() { + return startNanoTime; + } +} diff --git a/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java b/server/src/test/java/org/opensearch/common/concurrent/AutoCloseableRefCountedTests.java similarity index 50% rename from server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java rename to server/src/test/java/org/opensearch/common/concurrent/AutoCloseableRefCountedTests.java index 63058da8f163a..344368988f5ff 100644 --- a/server/src/test/java/org/opensearch/common/concurrent/GatedAutoCloseableTests.java +++ b/server/src/test/java/org/opensearch/common/concurrent/AutoCloseableRefCountedTests.java @@ -14,33 +14,36 @@ package org.opensearch.common.concurrent; import org.junit.Before; +import org.opensearch.common.util.concurrent.RefCounted; import org.opensearch.test.OpenSearchTestCase; -import java.util.concurrent.atomic.AtomicInteger; +import static org.mockito.Mockito.atMostOnce; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; -public class GatedAutoCloseableTests extends OpenSearchTestCase { +public class AutoCloseableRefCountedTests extends OpenSearchTestCase { - private AtomicInteger testRef; - private GatedAutoCloseable testObject; + private RefCounted mockRefCounted; + private AutoCloseableRefCounted testObject; @Before public void setup() { - testRef = new AtomicInteger(0); - testObject = new GatedAutoCloseable<>(testRef, testRef::incrementAndGet); + mockRefCounted = mock(RefCounted.class); + testObject = new AutoCloseableRefCounted<>(mockRefCounted); } public void testGet() { - assertEquals(0, testObject.get().get()); + assertEquals(mockRefCounted, testObject.get()); } public void testClose() { testObject.close(); - assertEquals(1, testObject.get().get()); + 
verify(mockRefCounted, atMostOnce()).decRef(); } public void testIdempotent() { testObject.close(); testObject.close(); - assertEquals(1, testObject.get().get()); + verify(mockRefCounted, atMostOnce()).decRef(); } } diff --git a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTargetTests.java b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTargetTests.java index 5d0d9bca8b3fb..dd4b17fbac5de 100644 --- a/server/src/test/java/org/opensearch/indices/recovery/RecoveryTargetTests.java +++ b/server/src/test/java/org/opensearch/indices/recovery/RecoveryTargetTests.java @@ -44,9 +44,9 @@ import org.opensearch.indices.recovery.RecoveryState.FileDetail; import org.opensearch.indices.recovery.RecoveryState.Index; import org.opensearch.indices.recovery.RecoveryState.Stage; -import org.opensearch.indices.recovery.RecoveryState.Timer; import org.opensearch.indices.recovery.RecoveryState.Translog; import org.opensearch.indices.recovery.RecoveryState.VerifyIndex; +import org.opensearch.indices.replication.common.ReplicationTimer; import org.opensearch.test.OpenSearchTestCase; import java.io.IOException; @@ -63,9 +63,7 @@ import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.startsWith; @@ -124,72 +122,81 @@ public void run() { } } - public void testTimers() throws Throwable { - final Timer timer; - Streamer streamer; + public void testTimer() throws Throwable { AtomicBoolean stop = new AtomicBoolean(); - if (randomBoolean()) { - timer = new Timer(); - streamer = new Streamer(stop, timer) { - @Override - Timer createObj(StreamInput in) throws IOException { - return new Timer(in); - } - }; - } else if (randomBoolean()) { - timer = new Index(); - streamer = new Streamer(stop, timer) { - @Override - Timer createObj(StreamInput in) throws IOException { - return new Index(in); - } - }; - } else if (randomBoolean()) { - timer = new VerifyIndex(); - streamer = new Streamer(stop, timer) { - @Override - Timer createObj(StreamInput in) throws IOException { - return new VerifyIndex(in); - } - }; - } else { - timer = new Translog(); - streamer = new Streamer(stop, timer) { - @Override - Timer createObj(StreamInput in) throws IOException { - return new Translog(in); - } - }; - } + final ReplicationTimer timer = new ReplicationTimer(); + Streamer streamer = new Streamer<>(stop, timer) { + @Override + ReplicationTimer createObj(StreamInput in) throws IOException { + return new ReplicationTimer(in); + } + }; + doTimerTest(timer, streamer); + } + + public void testIndexTimer() throws Throwable { + AtomicBoolean stop = new AtomicBoolean(); + Index index = new Index(); + Streamer streamer = new Streamer<>(stop, index) { + @Override + Index createObj(StreamInput in) throws IOException { + return new Index(in); + } + }; + doTimerTest(index, streamer); + } + public void testVerifyIndexTimer() throws Throwable { + AtomicBoolean stop = new AtomicBoolean(); + VerifyIndex verifyIndex = new VerifyIndex(); + Streamer streamer = new Streamer<>(stop, verifyIndex) { + @Override + VerifyIndex createObj(StreamInput in) throws IOException { + return new VerifyIndex(in); + } + }; + doTimerTest(verifyIndex, streamer); + } + + public void testTranslogTimer() throws Throwable { + AtomicBoolean 
stop = new AtomicBoolean(); + Translog translog = new Translog(); + Streamer streamer = new Streamer<>(stop, translog) { + @Override + Translog createObj(StreamInput in) throws IOException { + return new Translog(in); + } + }; + doTimerTest(translog, streamer); + } + + private void doTimerTest(ReplicationTimer timer, Streamer streamer) throws Exception { timer.start(); - assertThat(timer.startTime(), greaterThan(0L)); - assertThat(timer.stopTime(), equalTo(0L)); - Timer lastRead = streamer.serializeDeserialize(); + assertTrue(timer.startTime() > 0); + assertEquals(0, timer.stopTime()); + ReplicationTimer lastRead = streamer.serializeDeserialize(); final long time = lastRead.time(); - assertThat(time, lessThanOrEqualTo(timer.time())); - assertBusy(() -> assertThat("timer timer should progress compared to captured one ", time, lessThan(timer.time()))); - assertThat("captured time shouldn't change", lastRead.time(), equalTo(time)); + assertBusy(() -> assertTrue("timer timer should progress compared to captured one ", time < timer.time())); + assertEquals("captured time shouldn't change", time, lastRead.time()); - if (randomBoolean()) { - timer.stop(); - assertThat(timer.stopTime(), greaterThanOrEqualTo(timer.startTime())); - assertThat(timer.time(), greaterThan(0L)); - lastRead = streamer.serializeDeserialize(); - assertThat(lastRead.startTime(), equalTo(timer.startTime())); - assertThat(lastRead.time(), equalTo(timer.time())); - assertThat(lastRead.stopTime(), equalTo(timer.stopTime())); - } + timer.stop(); + assertTrue(timer.stopTime() >= timer.startTime()); + assertTrue(timer.time() > 0); + // validate captured time + lastRead = streamer.serializeDeserialize(); + assertEquals(timer.startTime(), lastRead.startTime()); + assertEquals(timer.time(), lastRead.time()); + assertEquals(timer.stopTime(), lastRead.stopTime()); timer.reset(); - assertThat(timer.startTime(), equalTo(0L)); - assertThat(timer.time(), equalTo(0L)); - assertThat(timer.stopTime(), equalTo(0L)); + assertEquals(0, timer.startTime()); + assertEquals(0, timer.time()); + assertEquals(0, timer.stopTime()); + // validate captured time lastRead = streamer.serializeDeserialize(); - assertThat(lastRead.startTime(), equalTo(0L)); - assertThat(lastRead.time(), equalTo(0L)); - assertThat(lastRead.stopTime(), equalTo(0L)); - + assertEquals(0, lastRead.startTime()); + assertEquals(0, lastRead.time()); + assertEquals(0, lastRead.stopTime()); } public void testIndex() throws Throwable { diff --git a/server/src/test/java/org/opensearch/rest/action/cat/RestRecoveryActionTests.java b/server/src/test/java/org/opensearch/rest/action/cat/RestRecoveryActionTests.java index 7966d2961c29a..e7eb9cbf24015 100644 --- a/server/src/test/java/org/opensearch/rest/action/cat/RestRecoveryActionTests.java +++ b/server/src/test/java/org/opensearch/rest/action/cat/RestRecoveryActionTests.java @@ -45,6 +45,7 @@ import org.opensearch.index.Index; import org.opensearch.index.shard.ShardId; import org.opensearch.indices.recovery.RecoveryState; +import org.opensearch.indices.replication.common.ReplicationTimer; import org.opensearch.test.OpenSearchTestCase; import java.util.ArrayList; @@ -72,7 +73,7 @@ public void testRestRecoveryAction() { for (int i = 0; i < successfulShards; i++) { final RecoveryState state = mock(RecoveryState.class); when(state.getShardId()).thenReturn(new ShardId(new Index("index", "_na_"), i)); - final RecoveryState.Timer timer = mock(RecoveryState.Timer.class); + final ReplicationTimer timer = mock(ReplicationTimer.class); final long 
startTime = randomLongBetween(0, new Date().getTime());
            when(timer.startTime()).thenReturn(startTime);
            final long time = randomLongBetween(1000000, 10 * 1000000);

From a34d11f15a2c0f14ac8ebda4d85da92000719b7f Mon Sep 17 00:00:00 2001
From: Wenjun Ruan
Date: Thu, 21 Apr 2022 11:14:44 +0800
Subject: [PATCH 33/41] Remove usages of MultiTermQuery.setRewriteMethod (#2997)

Remove usages of MultiTermQuery.setRewriteMethod which is removed in latest versions of Lucene.

Signed-off-by: ruanwenjun
---
 .../mapper/SearchAsYouTypeFieldMapper.java    |  4 +-
 .../search/query/QueryPhaseTests.java         | 12 ++---
 .../lucene/search/AutomatonQueries.java       | 48 +++++++++++++++----
 .../index/mapper/StringFieldType.java         | 44 ++++++++---------
 .../index/mapper/TextFieldMapper.java         |  5 +-
 .../index/query/RegexpQueryBuilder.java       | 13 ++---
 .../index/search/QueryStringQueryParser.java  | 11 +++--
 .../deps/lucene/VectorHighlighterTests.java   |  2 +-
 .../search/query/QueryPhaseTests.java         | 15 +++---
 9 files changed, 89 insertions(+), 65 deletions(-)

diff --git a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
index 7394993448bbf..68b887c4c4a43 100644
--- a/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
+++ b/modules/mapper-extras/src/main/java/org/opensearch/index/mapper/SearchAsYouTypeFieldMapper.java
@@ -60,6 +60,7 @@
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
 import org.opensearch.common.collect.Iterators;
+import org.opensearch.common.lucene.search.AutomatonQueries;
 import org.opensearch.index.analysis.AnalyzerScope;
 import org.opensearch.index.analysis.IndexAnalyzers;
 import org.opensearch.index.analysis.NamedAnalyzer;
@@ -431,8 +432,7 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool
             automata.add(Automata.makeAnyChar());
         }
         Automaton automaton = Operations.concatenate(automata);
-        AutomatonQuery query = new AutomatonQuery(new Term(name(), value + "*"), automaton);
-        query.setRewriteMethod(method);
+        AutomatonQuery query = AutomatonQueries.createAutomatonQuery(new Term(name(), value + "*"), automaton, method);
         return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD)
             .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD)
             .build();
diff --git a/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
index 83a0a63a6a5c8..74cd4754efe44 100644
--- a/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
+++ b/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryPhaseTests.java
@@ -33,7 +33,6 @@
 package org.opensearch.search.query;
 
 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.apache.lucene.analysis.standard.StandardAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field.Store;
@@ -52,6 +51,8 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.spans.SpanNearQuery;
+import org.apache.lucene.queries.spans.SpanTermQuery;
 import org.apache.lucene.search.BooleanClause;
 import
org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; @@ -82,8 +83,6 @@ import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ScoreMode; -import org.apache.lucene.queries.spans.SpanNearQuery; -import org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; @@ -122,7 +121,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import static org.opensearch.search.query.TopDocsCollectorContext.hasInfMaxScore; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -130,8 +128,9 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; +import static org.opensearch.search.query.TopDocsCollectorContext.hasInfMaxScore; public class QueryPhaseTests extends IndexShardTestCase { @@ -1114,8 +1113,7 @@ public void testCancellationDuringPreprocess() throws IOException { indexShard, newContextSearcher(reader, executor) ); - PrefixQuery prefixQuery = new PrefixQuery(new Term("foo", "a")); - prefixQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE); + PrefixQuery prefixQuery = new PrefixQuery(new Term("foo", "a"), MultiTermQuery.SCORING_BOOLEAN_REWRITE); context.parsedQuery(new ParsedQuery(prefixQuery)); SearchShardTask task = mock(SearchShardTask.class); when(task.isCancelled()).thenReturn(true); diff --git a/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java b/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java index 9755a99fecc08..12dec26090b48 100644 --- a/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/opensearch/common/lucene/search/AutomatonQueries.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.AutomatonQuery; +import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; @@ -63,29 +64,58 @@ public static Automaton caseInsensitivePrefix(String s) { return a; } - /** Build an automaton query accepting all terms with the specified prefix, ASCII case insensitive. */ + /** + * Build an automaton query accepting all terms with the specified prefix, ASCII case insensitive. + */ public static AutomatonQuery caseInsensitivePrefixQuery(Term prefix) { - return new AutomatonQuery(prefix, caseInsensitivePrefix(prefix.text())); + return caseInsensitivePrefixQuery(prefix, MultiTermQuery.CONSTANT_SCORE_REWRITE); } - /** Build an automaton accepting all terms ASCII case insensitive. */ + /** + * Build an automaton query accepting all terms with the specified prefix, ASCII case insensitive. + */ + public static AutomatonQuery caseInsensitivePrefixQuery(Term prefix, MultiTermQuery.RewriteMethod method) { + return createAutomatonQuery(prefix, caseInsensitivePrefix(prefix.text()), method); + } + + /** + * Build an automaton accepting all terms ASCII case insensitive. 
+ */ public static AutomatonQuery caseInsensitiveTermQuery(Term term) { BytesRef prefix = term.bytes(); return new AutomatonQuery(term, toCaseInsensitiveString(prefix, Integer.MAX_VALUE)); } - /** Build an automaton matching a wildcard pattern, ASCII case insensitive. */ - public static AutomatonQuery caseInsensitiveWildcardQuery(Term wildcardquery) { - return new AutomatonQuery(wildcardquery, toCaseInsensitiveWildcardAutomaton(wildcardquery, Integer.MAX_VALUE)); + /** + * Build an automaton matching a wildcard pattern, ASCII case insensitive, if the method is null, then will use {@link MultiTermQuery#CONSTANT_SCORE_REWRITE}. + */ + public static AutomatonQuery caseInsensitiveWildcardQuery(Term wildcardquery, MultiTermQuery.RewriteMethod method) { + return createAutomatonQuery(wildcardquery, toCaseInsensitiveWildcardAutomaton(wildcardquery, Integer.MAX_VALUE), method); + } + + /** + * Build an automaton matching a given pattern with rewrite method, if the rewrite method is null, then will use {@link MultiTermQuery#CONSTANT_SCORE_REWRITE}. + */ + public static AutomatonQuery createAutomatonQuery(Term term, Automaton automaton, MultiTermQuery.RewriteMethod method) { + if (method == null) { + method = MultiTermQuery.CONSTANT_SCORE_REWRITE; + } + return new AutomatonQuery(term, automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); } - /** String equality with support for wildcards */ + /** + * String equality with support for wildcards + */ public static final char WILDCARD_STRING = '*'; - /** Char equality with support for wildcards */ + /** + * Char equality with support for wildcards + */ public static final char WILDCARD_CHAR = '?'; - /** Escape character */ + /** + * Escape character + */ public static final char WILDCARD_ESCAPE = '\\'; /** diff --git a/server/src/main/java/org/opensearch/index/mapper/StringFieldType.java b/server/src/main/java/org/opensearch/index/mapper/StringFieldType.java index 42bd52dddf520..9aa6bf09a1176 100644 --- a/server/src/main/java/org/opensearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/opensearch/index/mapper/StringFieldType.java @@ -34,7 +34,6 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.index.Term; -import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.PrefixQuery; @@ -44,12 +43,12 @@ import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.automaton.Operations; import org.opensearch.OpenSearchException; import org.opensearch.common.lucene.BytesRefs; import org.opensearch.common.lucene.search.AutomatonQueries; import org.opensearch.common.unit.Fuzziness; import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.support.QueryParsers; import java.util.Map; import java.util.regex.Matcher; @@ -111,19 +110,13 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool ); } failIfNotIndexed(); - if (caseInsensitive) { - AutomatonQuery query = AutomatonQueries.caseInsensitivePrefixQuery((new Term(name(), indexedValueForSearch(value)))); - if (method != null) { - query.setRewriteMethod(method); - } - return query; - + if (method == null) { + method = MultiTermQuery.CONSTANT_SCORE_REWRITE; } - PrefixQuery query = new PrefixQuery(new Term(name(), indexedValueForSearch(value))); - if (method != null) { - 
query.setRewriteMethod(method); + if (caseInsensitive) { + return AutomatonQueries.caseInsensitivePrefixQuery((new Term(name(), indexedValueForSearch(value))), method); } - return query; + return new PrefixQuery(new Term(name(), indexedValueForSearch(value)), method); } public static final String normalizeWildcardPattern(String fieldname, String value, Analyzer normalizer) { @@ -173,13 +166,12 @@ public Query wildcardQuery(String value, MultiTermQuery.RewriteMethod method, bo term = new Term(name(), indexedValueForSearch(value)); } if (caseInsensitive) { - AutomatonQuery query = AutomatonQueries.caseInsensitiveWildcardQuery(term); - QueryParsers.setRewriteMethod(query, method); - return query; + return AutomatonQueries.caseInsensitiveWildcardQuery(term, method); } - WildcardQuery query = new WildcardQuery(term); - QueryParsers.setRewriteMethod(query, method); - return query; + if (method == null) { + method = MultiTermQuery.CONSTANT_SCORE_REWRITE; + } + return new WildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, method); } @Override @@ -197,11 +189,17 @@ public Query regexpQuery( ); } failIfNotIndexed(); - RegexpQuery query = new RegexpQuery(new Term(name(), indexedValueForSearch(value)), syntaxFlags, matchFlags, maxDeterminizedStates); - if (method != null) { - query.setRewriteMethod(method); + if (method == null) { + method = MultiTermQuery.CONSTANT_SCORE_REWRITE; } - return query; + return new RegexpQuery( + new Term(name(), indexedValueForSearch(value)), + syntaxFlags, + matchFlags, + RegexpQuery.DEFAULT_PROVIDER, + maxDeterminizedStates, + method + ); } @Override diff --git a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java index 4b2c20586834d..360240eb9d59c 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TextFieldMapper.java @@ -582,10 +582,7 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool automata.add(Automata.makeAnyChar()); } Automaton automaton = Operations.concatenate(automata); - AutomatonQuery query = new AutomatonQuery(new Term(name(), value + "*"), automaton); - if (method != null) { - query.setRewriteMethod(method); - } + AutomatonQuery query = AutomatonQueries.createAutomatonQuery(new Term(name(), value + "*"), automaton, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/server/src/main/java/org/opensearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/opensearch/index/query/RegexpQueryBuilder.java index dc6546a3fd3a2..c8192557ef266 100644 --- a/server/src/main/java/org/opensearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/opensearch/index/query/RegexpQueryBuilder.java @@ -309,16 +309,17 @@ protected Query doToQuery(QueryShardContext context) throws QueryShardException, query = fieldType.regexpQuery(value, sanitisedSyntaxFlag, matchFlagsValue, maxDeterminizedStates, method, context); } if (query == null) { - RegexpQuery regexpQuery = new RegexpQuery( + if (method == null) { + method = MultiTermQuery.CONSTANT_SCORE_REWRITE; + } + query = new RegexpQuery( new Term(fieldName, BytesRefs.toBytesRef(value)), sanitisedSyntaxFlag, matchFlagsValue, - maxDeterminizedStates + RegexpQuery.DEFAULT_PROVIDER, + maxDeterminizedStates, + method ); - if (method != null) { - 
regexpQuery.setRewriteMethod(method); - } - query = regexpQuery; } return query; } diff --git a/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java index e9437f5704851..980a42163c9c9 100644 --- a/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/opensearch/index/search/QueryStringQueryParser.java @@ -70,7 +70,6 @@ import org.opensearch.index.query.ExistsQueryBuilder; import org.opensearch.index.query.MultiMatchQueryBuilder; import org.opensearch.index.query.QueryShardContext; -import org.opensearch.index.query.support.QueryParsers; import java.io.IOException; import java.time.ZoneId; @@ -110,7 +109,7 @@ public class QueryStringQueryParser extends XQueryParser { private ZoneId timeZone; private Fuzziness fuzziness = Fuzziness.AUTO; private int fuzzyMaxExpansions = FuzzyQuery.defaultMaxExpansions; - private MultiTermQuery.RewriteMethod fuzzyRewriteMethod; + private MultiTermQuery.RewriteMethod fuzzyRewriteMethod = MultiTermQuery.CONSTANT_SCORE_REWRITE; private boolean fuzzyTranspositions = FuzzyQuery.defaultTranspositions; /** @@ -527,9 +526,11 @@ private Query getFuzzyQuerySingle(String field, String termStr, float minSimilar @Override protected Query newFuzzyQuery(Term term, float minimumSimilarity, int prefixLength) { int numEdits = Fuzziness.build(minimumSimilarity).asDistance(term.text()); - FuzzyQuery query = new FuzzyQuery(term, numEdits, prefixLength, fuzzyMaxExpansions, fuzzyTranspositions); - QueryParsers.setRewriteMethod(query, fuzzyRewriteMethod); - return query; + if (fuzzyRewriteMethod != null) { + return new FuzzyQuery(term, numEdits, prefixLength, fuzzyMaxExpansions, fuzzyTranspositions, fuzzyRewriteMethod); + } else { + return new FuzzyQuery(term, numEdits, prefixLength, fuzzyMaxExpansions, fuzzyTranspositions); + } } @Override diff --git a/server/src/test/java/org/opensearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/opensearch/deps/lucene/VectorHighlighterTests.java index e3a4c8a3e890d..e91da4f5ee46e 100644 --- a/server/src/test/java/org/opensearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/opensearch/deps/lucene/VectorHighlighterTests.java @@ -121,7 +121,7 @@ public void testVectorHighlighterPrefixQuery() throws Exception { ); assertThat(fragment, nullValue()); - prefixQuery.setRewriteMethod(PrefixQuery.SCORING_BOOLEAN_REWRITE); + prefixQuery = new PrefixQuery(new Term("content", "ba"), PrefixQuery.SCORING_BOOLEAN_REWRITE); Query rewriteQuery = prefixQuery.rewrite(reader); fragment = highlighter.getBestFragment(highlighter.getFieldQuery(rewriteQuery), reader, topDocs.scoreDocs[0].doc, "content", 30); assertThat(fragment, notNullValue()); diff --git a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java index 1232347edea64..2234c8a980923 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryPhaseTests.java @@ -49,9 +49,7 @@ import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.index.Term; -import org.opensearch.lucene.queries.MinDocQuery; import org.apache.lucene.queries.spans.SpanNearQuery; import 
org.apache.lucene.queries.spans.SpanTermQuery; import org.apache.lucene.search.BooleanClause.Occur; @@ -82,6 +80,7 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.opensearch.action.search.SearchShardTask; @@ -97,6 +96,7 @@ import org.opensearch.index.search.OpenSearchToParentBlockJoinQuery; import org.opensearch.index.shard.IndexShard; import org.opensearch.index.shard.IndexShardTestCase; +import org.opensearch.lucene.queries.MinDocQuery; import org.opensearch.search.DocValueFormat; import org.opensearch.search.collapse.CollapseBuilder; import org.opensearch.search.internal.ContextIndexSearcher; @@ -111,16 +111,16 @@ import java.util.Collections; import java.util.List; -import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.instanceOf; -import static org.opensearch.search.query.TopDocsCollectorContext.hasInfMaxScore; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; +import static org.opensearch.search.query.TopDocsCollectorContext.hasInfMaxScore; public class QueryPhaseTests extends IndexShardTestCase { @@ -1069,8 +1069,7 @@ public void testCancellationDuringPreprocess() throws IOException { try (IndexReader reader = DirectoryReader.open(dir)) { TestSearchContext context = new TestSearchContextWithRewriteAndCancellation(null, indexShard, newContextSearcher(reader)); - PrefixQuery prefixQuery = new PrefixQuery(new Term("foo", "a")); - prefixQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE); + PrefixQuery prefixQuery = new PrefixQuery(new Term("foo", "a"), MultiTermQuery.SCORING_BOOLEAN_REWRITE); context.parsedQuery(new ParsedQuery(prefixQuery)); SearchShardTask task = mock(SearchShardTask.class); when(task.isCancelled()).thenReturn(true); From dbdee30a376b2dcbb938790ad99959338bfa1c8e Mon Sep 17 00:00:00 2001 From: Suraj Singh Date: Wed, 20 Apr 2022 20:22:11 -0700 Subject: [PATCH 34/41] [Type Removal] Remove TypeFieldMapper usage, remove support of `_type` in searches and from LeafFieldsLookup (#3016) Removes TypeFieldMapper and _type support from searches Signed-off-by: Suraj Singh --- .../PercolatorFieldMapperTests.java | 4 +- .../test/search/160_exists_query.yml | 13 --- .../document/DocumentActionsIT.java | 9 +- .../aggregations/metrics/TopHitsIT.java | 4 +- .../search/fields/SearchFieldsIT.java | 36 ------ .../index/mapper/DocumentMapper.java | 1 - .../index/mapper/MapperService.java | 5 - .../index/mapper/TypeFieldMapper.java | 15 +-- .../org/opensearch/indices/IndicesModule.java | 2 - .../search/lookup/LeafFieldsLookup.java | 24 +--- .../index/engine/InternalEngineTests.java | 2 +- .../opensearch/index/get/GetResultTests.java | 6 +- .../index/mapper/TypeFieldMapperTests.java | 106 ------------------ .../index/mapper/TypeFieldTypeTests.java | 66 ----------- .../index/query/TermQueryBuilderTests.java | 7 -- .../index/query/TermsQueryBuilderTests.java | 7 -- 
.../query/WildcardQueryBuilderTests.java | 7 -- .../indices/IndicesModuleTests.java | 2 - .../support/ValuesSourceConfigTests.java | 20 ---- .../fetch/subphase/FieldFetcherTests.java | 5 + 20 files changed, 21 insertions(+), 320 deletions(-) delete mode 100644 server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java delete mode 100644 server/src/test/java/org/opensearch/index/mapper/TypeFieldTypeTests.java diff --git a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java index ca6f3a78b27d7..fe9c486b68166 100644 --- a/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/opensearch/percolator/PercolatorFieldMapperTests.java @@ -862,7 +862,7 @@ public void testUnsupportedQueries() { PercolatorFieldMapper.verifyQuery(rangeQuery1); PercolatorFieldMapper.verifyQuery(rangeQuery2); - HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("_type", new MatchAllQueryBuilder(), ScoreMode.None); + HasChildQueryBuilder hasChildQuery = new HasChildQueryBuilder("parent", new MatchAllQueryBuilder(), ScoreMode.None); expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery))); expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new DisMaxQueryBuilder().add(hasChildQuery))); PercolatorFieldMapper.verifyQuery(new ConstantScoreQueryBuilder((rangeQuery1))); @@ -881,7 +881,7 @@ public void testUnsupportedQueries() { expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasChildQuery)); expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasChildQuery))); - HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("_type", new MatchAllQueryBuilder(), false); + HasParentQueryBuilder hasParentQuery = new HasParentQueryBuilder("parent", new MatchAllQueryBuilder(), false); expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(hasParentQuery)); expectThrows(IllegalArgumentException.class, () -> PercolatorFieldMapper.verifyQuery(new BoolQueryBuilder().must(hasParentQuery))); } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml index 201e456be2cdd..be97930d41eb9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/160_exists_query.yml @@ -561,19 +561,6 @@ setup: - match: {hits.total: 4} ---- -"Test exists query on _type field": - - do: - search: - rest_total_hits_as_int: true - index: test - body: - query: - exists: - field: _type - - - match: {hits.total: 4} - --- "Test exists query on _routing field": - do: diff --git a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java index 1e40cc14bbb36..fa94d5c1c5024 100644 --- a/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/document/DocumentActionsIT.java @@ -47,7 +47,6 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; 
import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.mapper.MapperService; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; @@ -58,7 +57,7 @@ import static org.opensearch.client.Requests.getRequest; import static org.opensearch.client.Requests.indexRequest; import static org.opensearch.client.Requests.refreshRequest; -import static org.opensearch.index.query.QueryBuilders.termQuery; +import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.equalTo; @@ -181,11 +180,7 @@ public void testIndexActions() throws Exception { // check count for (int i = 0; i < 5; i++) { // test successful - SearchResponse countResponse = client().prepareSearch("test") - .setSize(0) - .setQuery(termQuery("_type", MapperService.SINGLE_MAPPING_NAME)) - .execute() - .actionGet(); + SearchResponse countResponse = client().prepareSearch("test").setSize(0).setQuery(matchAllQuery()).execute().actionGet(); assertNoFailures(countResponse); assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java index c3240c5eef7c5..4c5c42b773e93 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/metrics/TopHitsIT.java @@ -1386,7 +1386,7 @@ public void testWithRescore() { SearchResponse response = client().prepareSearch("idx") .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) .addAggregation( - terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_type"))) + terms("terms").field(TERMS_AGGS_FIELD).subAggregation(topHits("hits").sort(SortBuilders.fieldSort("_index"))) ) .get(); Terms terms = response.getAggregations().get("terms"); @@ -1403,7 +1403,7 @@ public void testWithRescore() { .addRescorer(new QueryRescorerBuilder(new MatchAllQueryBuilder().boost(3.0f))) .addAggregation( terms("terms").field(TERMS_AGGS_FIELD) - .subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_type"))) + .subAggregation(topHits("hits").sort(SortBuilders.scoreSort()).sort(SortBuilders.fieldSort("_index"))) ) .get(); Terms terms = response.getAggregations().get("terms"); diff --git a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java index 25782f8dc18db..941f4982af9cc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/fields/SearchFieldsIT.java @@ -471,42 +471,6 @@ public void testIdBasedScriptFields() throws Exception { assertThat(fields, equalTo(singleton("id"))); assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i))); } - - response = client().prepareSearch() - .setQuery(matchAllQuery()) - .addSort("num1", SortOrder.ASC) - .setSize(numDocs) - .addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, 
"_fields._type.value", Collections.emptyMap())) - .get(); - - assertNoFailures(response); - - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); - for (int i = 0; i < numDocs; i++) { - assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); - assertThat(fields, equalTo(singleton("type"))); - assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME)); - } - - response = client().prepareSearch() - .setQuery(matchAllQuery()) - .addSort("num1", SortOrder.ASC) - .setSize(numDocs) - .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())) - .addScriptField("type", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._type.value", Collections.emptyMap())) - .get(); - - assertNoFailures(response); - - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); - for (int i = 0; i < numDocs; i++) { - assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); - Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); - assertThat(fields, equalTo(newHashSet("type", "id"))); - assertThat(response.getHits().getAt(i).getFields().get("type").getValue(), equalTo(MapperService.SINGLE_MAPPING_NAME)); - assertThat(response.getHits().getAt(i).getFields().get("id").getValue(), equalTo(Integer.toString(i))); - } } public void testScriptFieldUsingSource() throws Exception { diff --git a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java index 0ee0a3cb9a180..0bebfa024e185 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/DocumentMapper.java @@ -161,7 +161,6 @@ public DocumentMapper(MapperService mapperService, Mapping mapping) { final Collection deleteTombstoneMetadataFields = Arrays.asList( VersionFieldMapper.NAME, IdFieldMapper.NAME, - TypeFieldMapper.NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.PRIMARY_TERM_NAME, SeqNoFieldMapper.TOMBSTONE_NAME diff --git a/server/src/main/java/org/opensearch/index/mapper/MapperService.java b/server/src/main/java/org/opensearch/index/mapper/MapperService.java index a92647929ff08..819df4a6f396e 100644 --- a/server/src/main/java/org/opensearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/opensearch/index/mapper/MapperService.java @@ -576,11 +576,6 @@ public DocumentMapperForType documentMapperWithAutoCreate() { * Given the full name of a field, returns its {@link MappedFieldType}. */ public MappedFieldType fieldType(String fullName) { - if (fullName.equals(TypeFieldMapper.NAME)) { - String type = mapper == null ? null : mapper.type(); - return new TypeFieldMapper.TypeFieldType(type); - } - return this.mapper == null ? 
null : this.mapper.fieldTypes().get(fullName); } diff --git a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java index 9adb1430b3df0..8d3f1df677040 100644 --- a/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/opensearch/index/mapper/TypeFieldMapper.java @@ -41,7 +41,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.opensearch.common.geo.ShapeRelation; -import org.opensearch.common.logging.DeprecationLogger; import org.opensearch.common.regex.Regex; import org.opensearch.common.time.DateMathParser; import org.opensearch.index.fielddata.IndexFieldData; @@ -55,17 +54,9 @@ import java.util.Objects; import java.util.function.Supplier; +// Todo: Remove TypeFieldMapper once we have NestedFieldMapper implementation public class TypeFieldMapper extends MetadataFieldMapper { - private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TypeFieldType.class); - - public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using the _type field " - + "in queries and aggregations is deprecated, prefer to use a field instead."; - - public static void emitTypesDeprecationWarning() { - deprecationLogger.deprecate("query_with_types", TYPES_DEPRECATION_MESSAGE); - } - public static final String NAME = "_type"; public static final String CONTENT_TYPE = "_type"; @@ -101,7 +92,6 @@ public String typeName() { @Override public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier searchLookup) { - emitTypesDeprecationWarning(); return new ConstantIndexFieldData.Builder(type, name(), CoreValuesSourceType.BYTES); } @@ -112,13 +102,11 @@ public ValueFetcher valueFetcher(QueryShardContext context, SearchLookup lookup, @Override public Query existsQuery(QueryShardContext context) { - emitTypesDeprecationWarning(); return new MatchAllDocsQuery(); } @Override protected boolean matches(String pattern, boolean caseInsensitive, QueryShardContext context) { - emitTypesDeprecationWarning(); if (type == null) { return false; } @@ -136,7 +124,6 @@ public Query rangeQuery( DateMathParser parser, QueryShardContext context ) { - emitTypesDeprecationWarning(); BytesRef lower = (BytesRef) lowerTerm; BytesRef upper = (BytesRef) upperTerm; if (includeLower) { diff --git a/server/src/main/java/org/opensearch/indices/IndicesModule.java b/server/src/main/java/org/opensearch/indices/IndicesModule.java index e685ea52aa5ca..9a7b91f020e36 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesModule.java +++ b/server/src/main/java/org/opensearch/indices/IndicesModule.java @@ -64,7 +64,6 @@ import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.mapper.TextFieldMapper; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.mapper.VersionFieldMapper; import org.opensearch.index.seqno.RetentionLeaseBackgroundSyncAction; import org.opensearch.index.seqno.RetentionLeaseSyncAction; @@ -185,7 +184,6 @@ private static Map initBuiltInMetadataMa builtInMetadataMappers.put(IndexFieldMapper.NAME, IndexFieldMapper.PARSER); builtInMetadataMappers.put(DataStreamFieldMapper.NAME, DataStreamFieldMapper.PARSER); builtInMetadataMappers.put(SourceFieldMapper.NAME, SourceFieldMapper.PARSER); - builtInMetadataMappers.put(TypeFieldMapper.NAME, TypeFieldMapper.PARSER); 
builtInMetadataMappers.put(VersionFieldMapper.NAME, VersionFieldMapper.PARSER); builtInMetadataMappers.put(SeqNoFieldMapper.NAME, SeqNoFieldMapper.PARSER); // _field_names must be added last so that it has a chance to see all the other mappers diff --git a/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java b/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java index 14c5dade52c87..62b040dfdc8d7 100644 --- a/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java +++ b/server/src/main/java/org/opensearch/search/lookup/LeafFieldsLookup.java @@ -34,10 +34,8 @@ import org.apache.lucene.index.LeafReader; import org.opensearch.OpenSearchParseException; import org.opensearch.index.fieldvisitor.SingleFieldsVisitor; -import org.opensearch.index.mapper.DocumentMapper; import org.opensearch.index.mapper.MappedFieldType; import org.opensearch.index.mapper.MapperService; -import org.opensearch.index.mapper.TypeFieldMapper; import java.io.IOException; import java.util.ArrayList; @@ -147,22 +145,12 @@ private FieldLookup loadFieldData(String name) { cachedFieldData.put(name, data); } if (data.fields() == null) { - List values; - if (TypeFieldMapper.NAME.equals(data.fieldType().name())) { - TypeFieldMapper.emitTypesDeprecationWarning(); - values = new ArrayList<>(1); - final DocumentMapper mapper = mapperService.documentMapper(); - if (mapper != null) { - values.add(mapper.type()); - } - } else { - values = new ArrayList(2); - SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values); - try { - reader.document(docId, visitor); - } catch (IOException e) { - throw new OpenSearchParseException("failed to load field [{}]", e, name); - } + List values = new ArrayList<>(2); + SingleFieldsVisitor visitor = new SingleFieldsVisitor(data.fieldType(), values); + try { + reader.document(docId, visitor); + } catch (IOException e) { + throw new OpenSearchParseException("failed to load field [{}]", e, name); } data.fields(singletonMap(data.fieldType().name(), values)); } diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index c33adf3bcb558..cbae55a047a1e 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -381,7 +381,7 @@ public void testSegmentsWithMergeFlag() throws Exception { } public void testSegmentsWithIndexSort() throws Exception { - Sort indexSort = new Sort(new SortedSetSortField("_type", false)); + Sort indexSort = new Sort(new SortedSetSortField("field", false)); try ( Store store = createStore(); Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null, null, null, indexSort, null) diff --git a/server/src/test/java/org/opensearch/index/get/GetResultTests.java b/server/src/test/java/org/opensearch/index/get/GetResultTests.java index 9519b83fa54b1..03621f83e8af2 100644 --- a/server/src/test/java/org/opensearch/index/get/GetResultTests.java +++ b/server/src/test/java/org/opensearch/index/get/GetResultTests.java @@ -46,7 +46,6 @@ import org.opensearch.index.mapper.IndexFieldMapper; import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.mapper.VersionFieldMapper; import org.opensearch.test.OpenSearchTestCase; import 
org.opensearch.test.RandomObjects; @@ -372,9 +371,8 @@ public static Tuple, Map> rand Map fields = new HashMap<>(numFields); Map expectedFields = new HashMap<>(numFields); // As we are using this to construct a GetResult object that already contains - // index, type, id, version, seqNo, and source fields, we need to exclude them from random fields - Predicate excludeMetaFieldFilter = field -> field.equals(TypeFieldMapper.NAME) - || field.equals(IndexFieldMapper.NAME) + // index, id, version, seqNo, and source fields, we need to exclude them from random fields + Predicate excludeMetaFieldFilter = field -> field.equals(IndexFieldMapper.NAME) || field.equals(IdFieldMapper.NAME) || field.equals(VersionFieldMapper.NAME) || field.equals(SourceFieldMapper.NAME) diff --git a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java b/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java deleted file mode 100644 index 89eee655ca9d4..0000000000000 --- a/server/src/test/java/org/opensearch/index/mapper/TypeFieldMapperTests.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.index.mapper; - -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.store.Directory; -import org.apache.lucene.util.BytesRef; -import org.opensearch.common.bytes.BytesArray; -import org.opensearch.common.compress.CompressedXContent; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentType; -import org.opensearch.index.IndexService; -import org.opensearch.index.fielddata.IndexFieldDataCache; -import org.opensearch.index.fielddata.IndexOrdinalsFieldData; -import org.opensearch.index.fielddata.LeafOrdinalsFieldData; -import org.opensearch.index.mapper.MapperService.MergeReason; -import org.opensearch.indices.breaker.NoneCircuitBreakerService; -import org.opensearch.plugins.Plugin; -import org.opensearch.test.OpenSearchSingleNodeTestCase; -import org.opensearch.test.InternalSettingsPlugin; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.function.Function; - -public class TypeFieldMapperTests extends OpenSearchSingleNodeTestCase { - - @Override - protected Collection> getPlugins() { - return pluginList(InternalSettingsPlugin.class); - } - - public void testDocValuesSingleType() throws Exception { - testDocValues(this::createIndex); - assertWarnings("[types removal] Using the _type field in queries and aggregations is deprecated, prefer to use a field instead."); - } - - public static void testDocValues(Function createIndex) throws IOException { - MapperService mapperService = createIndex.apply("test").mapperService(); - DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); - ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); - - Directory dir = newDirectory(); - IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()); - w.addDocument(document.rootDoc()); - DirectoryReader r = DirectoryReader.open(w); - w.close(); - - MappedFieldType ft = mapperService.fieldType(TypeFieldMapper.NAME); - IndexOrdinalsFieldData fd = (IndexOrdinalsFieldData) ft.fielddataBuilder( - "test", - () -> { throw new UnsupportedOperationException(); } - ).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()); - LeafOrdinalsFieldData afd = fd.load(r.leaves().get(0)); - SortedSetDocValues values = afd.getOrdinalsValues(); - assertTrue(values.advanceExact(0)); - assertEquals(0, values.nextOrd()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, values.nextOrd()); - assertEquals(new BytesRef("type"), values.lookupOrd(0)); - r.close(); - dir.close(); - } - - public void testDefaults() throws IOException { - Settings indexSettings = Settings.EMPTY; - MapperService mapperService = createIndex("test", indexSettings).mapperService(); - DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); - ParsedDocument document = mapper.parse(new SourceToParse("index", "id", new BytesArray("{}"), XContentType.JSON)); - assertEquals(Collections.emptyList(), Arrays.asList(document.rootDoc().getFields(TypeFieldMapper.NAME))); - } -} diff --git a/server/src/test/java/org/opensearch/index/mapper/TypeFieldTypeTests.java b/server/src/test/java/org/opensearch/index/mapper/TypeFieldTypeTests.java deleted file mode 
100644 index 66377a16b90d4..0000000000000 --- a/server/src/test/java/org/opensearch/index/mapper/TypeFieldTypeTests.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.index.mapper; - -import org.apache.lucene.search.MatchAllDocsQuery; -import org.apache.lucene.search.MatchNoDocsQuery; -import org.apache.lucene.search.Query; -import org.opensearch.index.query.QueryShardContext; -import org.opensearch.test.OpenSearchTestCase; -import org.mockito.Mockito; - -import java.util.Arrays; - -public class TypeFieldTypeTests extends OpenSearchTestCase { - - public void testTermsQuery() { - QueryShardContext context = Mockito.mock(QueryShardContext.class); - - TypeFieldMapper.TypeFieldType ft = new TypeFieldMapper.TypeFieldType("_doc"); - Query query = ft.termQuery("my_type", context); - assertEquals(new MatchNoDocsQuery(), query); - - query = ft.termQuery("_doc", context); - assertEquals(new MatchAllDocsQuery(), query); - - query = ft.termsQuery(Arrays.asList("_doc", "type", "foo"), context); - assertEquals(new MatchAllDocsQuery(), query); - - query = ft.termsQuery(Arrays.asList("type", "foo"), context); - assertEquals(new MatchNoDocsQuery(), query); - - query = ft.termQueryCaseInsensitive("_DOC", context); - assertEquals(new MatchAllDocsQuery(), query); - - assertWarnings("[types removal] Using the _type field in queries and aggregations is deprecated, prefer to use a field instead."); - } -} diff --git a/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java index 9cac88a256a0a..cc877c7590c6a 100644 --- a/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermQueryBuilderTests.java @@ -41,7 +41,6 @@ import org.apache.lucene.search.TermQuery; import org.opensearch.common.ParsingException; import org.opensearch.index.mapper.MappedFieldType; -import org.opensearch.index.mapper.TypeFieldMapper; import java.io.IOException; @@ -198,12 +197,6 @@ public void testParseAndSerializeBigInteger() throws IOException { assertSerialization(parsedQuery); } - public void testTypeField() throws IOException { - TermQueryBuilder builder = QueryBuilders.termQuery("_type", "value1"); - builder.doToQuery(createShardContext()); - assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE); - } - public void testRewriteIndexQueryToMatchNone() throws 
IOException { TermQueryBuilder query = QueryBuilders.termQuery("_index", "does_not_exist"); QueryShardContext queryShardContext = createShardContext(); diff --git a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java index ea93d7a65b951..e9a285208f1a6 100644 --- a/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/TermsQueryBuilderTests.java @@ -50,7 +50,6 @@ import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.get.GetResult; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.indices.TermsLookup; import org.opensearch.test.AbstractQueryTestCase; import org.hamcrest.CoreMatchers; @@ -351,12 +350,6 @@ public void testConversion() { assertEquals(Arrays.asList(5, 42d), TermsQueryBuilder.convertBack(TermsQueryBuilder.convert(list))); } - public void testTypeField() throws IOException { - TermsQueryBuilder builder = QueryBuilders.termsQuery("_type", "value1", "value2"); - builder.doToQuery(createShardContext()); - assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE); - } - public void testRewriteIndexQueryToMatchNone() throws IOException { TermsQueryBuilder query = new TermsQueryBuilder("_index", "does_not_exist", "also_does_not_exist"); QueryShardContext queryShardContext = createShardContext(); diff --git a/server/src/test/java/org/opensearch/index/query/WildcardQueryBuilderTests.java b/server/src/test/java/org/opensearch/index/query/WildcardQueryBuilderTests.java index c8a4207f21c25..9e99b7667f3e0 100644 --- a/server/src/test/java/org/opensearch/index/query/WildcardQueryBuilderTests.java +++ b/server/src/test/java/org/opensearch/index/query/WildcardQueryBuilderTests.java @@ -36,7 +36,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.WildcardQuery; import org.opensearch.common.ParsingException; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.test.AbstractQueryTestCase; import java.io.IOException; @@ -150,12 +149,6 @@ public void testParseFailsWithMultipleFields() throws IOException { assertEquals("[wildcard] query doesn't support multiple fields, found [user1] and [user2]", e.getMessage()); } - public void testTypeField() throws IOException { - WildcardQueryBuilder builder = QueryBuilders.wildcardQuery("_type", "doc*"); - builder.doToQuery(createShardContext()); - assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE); - } - public void testRewriteIndexQueryToMatchNone() throws IOException { WildcardQueryBuilder query = new WildcardQueryBuilder("_index", "does_not_exist"); QueryShardContext queryShardContext = createShardContext(); diff --git a/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java b/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java index c2298f60e4a2b..8123f044798bd 100644 --- a/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java +++ b/server/src/test/java/org/opensearch/indices/IndicesModuleTests.java @@ -45,7 +45,6 @@ import org.opensearch.index.mapper.SeqNoFieldMapper; import org.opensearch.index.mapper.SourceFieldMapper; import org.opensearch.index.mapper.TextFieldMapper; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.mapper.VersionFieldMapper; import org.opensearch.indices.mapper.MapperRegistry; import org.opensearch.plugins.MapperPlugin; @@ -95,7 
+94,6 @@ public Map getMetadataMappers() { IndexFieldMapper.NAME, DataStreamFieldMapper.NAME, SourceFieldMapper.NAME, - TypeFieldMapper.NAME, VersionFieldMapper.NAME, SeqNoFieldMapper.NAME, FieldNamesFieldMapper.NAME }; diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java index f866d817a7c43..33d9a63f61a35 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/support/ValuesSourceConfigTests.java @@ -40,7 +40,6 @@ import org.opensearch.index.IndexService; import org.opensearch.index.engine.Engine; import org.opensearch.index.fielddata.SortedBinaryDocValues; -import org.opensearch.index.mapper.TypeFieldMapper; import org.opensearch.index.query.QueryShardContext; import org.opensearch.test.OpenSearchSingleNodeTestCase; @@ -310,25 +309,6 @@ public void testUnmappedBoolean() throws Exception { } } - public void testTypeFieldDeprecation() { - IndexService indexService = createIndex("index", Settings.EMPTY, "type"); - try (Engine.Searcher searcher = indexService.getShard(0).acquireSearcher("test")) { - QueryShardContext context = indexService.newQueryShardContext(0, searcher, () -> 42L, null); - - ValuesSourceConfig config = ValuesSourceConfig.resolve( - context, - null, - TypeFieldMapper.NAME, - null, - null, - null, - null, - CoreValuesSourceType.BYTES - ); - assertWarnings(TypeFieldMapper.TYPES_DEPRECATION_MESSAGE); - } - } - public void testFieldAlias() throws Exception { IndexService indexService = createIndex("index", Settings.EMPTY, "type", "field", "type=keyword", "alias", "type=alias,path=field"); client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); diff --git a/server/src/test/java/org/opensearch/search/fetch/subphase/FieldFetcherTests.java b/server/src/test/java/org/opensearch/search/fetch/subphase/FieldFetcherTests.java index 8147d1afb8c15..0aff1efff88ef 100644 --- a/server/src/test/java/org/opensearch/search/fetch/subphase/FieldFetcherTests.java +++ b/server/src/test/java/org/opensearch/search/fetch/subphase/FieldFetcherTests.java @@ -117,6 +117,11 @@ public void testMetadataFields() throws IOException { Map fields = fetchFields(mapperService, source, "_routing"); assertTrue(fields.isEmpty()); + + // The _type field was deprecated in 7.x and is not supported in 2.0. So the behavior + // should be the same as if the field didn't exist. 
+ fields = fetchFields(mapperService, source, "_type"); + assertTrue(fields.isEmpty()); } public void testFetchAllFields() throws IOException { From e9ad90b9f6bce1fafc9eac35ad74fd719a051c01 Mon Sep 17 00:00:00 2001 From: Owais Kazi Date: Wed, 20 Apr 2022 20:36:58 -0700 Subject: [PATCH 35/41] Removed binary file from linelint (#3015) Signed-off-by: Owais Kazi --- .gitignore | 3 +++ .linelint.yml | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index e2cb6d8d37a82..8ea328ce2f1e9 100644 --- a/.gitignore +++ b/.gitignore @@ -56,3 +56,6 @@ testfixtures_shared/ # These are generated from .ci/jobs.t .ci/jobs/ + +# build files generated +doc-tools/missing-doclet/bin/ diff --git a/.linelint.yml b/.linelint.yml index 7b7bc162eef28..6240c8b3d7a96 100644 --- a/.linelint.yml +++ b/.linelint.yml @@ -12,7 +12,6 @@ ignore: - 'buildSrc/src/testKit/opensearch.build/NOTICE' - 'server/licenses/apache-log4j-extras-DEPENDENCIES' # Empty files - - 'doc-tools/missing-doclet/bin/main/org/opensearch/missingdoclet/MissingDoclet.class' - 'buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/build.gradle' - 'buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/archives/oss-darwin-tar/build.gradle' - 'buildSrc/src/integTest/resources/org/opensearch/gradle/internal/fake_git/remote/distribution/bwc/bugfix/build.gradle' From 6517eeca507943757475fbe4427305bfc10b3d17 Mon Sep 17 00:00:00 2001 From: Tushar Kharbanda Date: Thu, 21 Apr 2022 19:21:44 +0530 Subject: [PATCH 36/41] Support task resource tracking in OpenSearch (#2639) * Add Task id in Thread Context Signed-off-by: Tushar Kharbanda * Add resource tracking update support for tasks Signed-off-by: Tushar Kharbanda * List tasks action support for task resource refresh Signed-off-by: Tushar Kharbanda * Handle task unregistration case on same thread Signed-off-by: Tushar Kharbanda * Add lazy initialisation for RunnableTaskExecutionListener Signed-off-by: Tushar Kharbanda * Segregate resource tracking logic to a separate service. Signed-off-by: Tushar Kharbanda * Check for running threads during task unregister Signed-off-by: Tushar Kharbanda * Moved thread context logic to resource tracking service Signed-off-by: Tushar Kharbanda * preserve task id in thread context even after stash Signed-off-by: Tushar Kharbanda * Add null check for resource tracking service Signed-off-by: Tushar Kharbanda * Tracking service tests and minor refactoring Signed-off-by: Tushar Kharbanda * Preserve task id fix with test Signed-off-by: Tushar Kharbanda * Minor test changes and Task tracking call update Signed-off-by: Tushar Kharbanda * Fix Auto Queue executor method's signature Signed-off-by: Tushar Kharbanda * Make task runnable task listener factory implement consumer Signed-off-by: Tushar Kharbanda * Use reflection for ThreadMXBean Signed-off-by: Tushar Kharbanda * Formatting Signed-off-by: Tushar Kharbanda * Replace RunnableTaskExecutionListenerFactory with AtomicReference Signed-off-by: Tushar Kharbanda * Revert "Use reflection for ThreadMXBean" This reverts commit cbcf3c525bf516fb7164f0221491a7b25c1f96ec. 
Signed-off-by: Tushar Kharbanda * Suppress Warning related to ThreadMXBean Signed-off-by: Tushar Kharbanda * Add separate method for task resource tracking supported check Signed-off-by: Tushar Kharbanda * Enabled setting by default Signed-off-by: Tushar Kharbanda * Add debug logs for stale context id Signed-off-by: Tushar Kharbanda * Remove hardcoded task overhead in tests Signed-off-by: Tushar Kharbanda * Bump stale task id in thread context log level to warn Signed-off-by: Tushar Kharbanda * Improve assertions and logging Signed-off-by: Tushar Kharbanda Co-authored-by: Tushar Kharbanda --- .../admin/cluster/node/tasks/TasksIT.java | 6 + .../tasks/list/TransportListTasksAction.java | 13 +- .../action/search/SearchShardTask.java | 5 + .../opensearch/action/search/SearchTask.java | 5 + .../action/support/TransportAction.java | 78 ++- .../org/opensearch/cluster/ClusterModule.java | 2 + .../common/settings/ClusterSettings.java | 4 +- .../util/concurrent/OpenSearchExecutors.java | 52 +- .../common/util/concurrent/ThreadContext.java | 16 +- .../main/java/org/opensearch/node/Node.java | 13 +- .../main/java/org/opensearch/tasks/Task.java | 17 +- .../org/opensearch/tasks/TaskManager.java | 27 +- .../tasks/TaskResourceTrackingService.java | 255 +++++++ .../opensearch/tasks/ThreadResourceInfo.java | 10 +- .../AutoQueueAdjustingExecutorBuilder.java | 19 +- .../RunnableTaskExecutionListener.java | 33 + .../threadpool/TaskAwareRunnable.java | 90 +++ .../org/opensearch/threadpool/ThreadPool.java | 22 +- .../transport/RequestHandlerRegistry.java | 4 + .../tasks/RecordingTaskManagerListener.java | 3 + .../node/tasks/ResourceAwareTasksTests.java | 633 ++++++++++++++++++ .../node/tasks/TaskManagerTestCase.java | 17 +- .../bulk/TransportBulkActionIngestTests.java | 3 +- .../util/concurrent/ThreadContextTests.java | 10 + .../snapshots/SnapshotResiliencyTests.java | 3 + .../opensearch/tasks/TaskManagerTests.java | 6 +- .../TaskResourceTrackingServiceTests.java | 97 +++ .../test/tasks/MockTaskManager.java | 16 + .../test/tasks/MockTaskManagerListener.java | 3 + .../opensearch/threadpool/TestThreadPool.java | 20 +- 30 files changed, 1421 insertions(+), 61 deletions(-) create mode 100644 server/src/main/java/org/opensearch/tasks/TaskResourceTrackingService.java create mode 100644 server/src/main/java/org/opensearch/threadpool/RunnableTaskExecutionListener.java create mode 100644 server/src/main/java/org/opensearch/threadpool/TaskAwareRunnable.java create mode 100644 server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java create mode 100644 server/src/test/java/org/opensearch/tasks/TaskResourceTrackingServiceTests.java diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java index ac0ae44eb732e..c74f992970545 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/cluster/node/tasks/TasksIT.java @@ -470,6 +470,9 @@ public void onTaskUnregistered(Task task) {} @Override public void waitForTaskCompletion(Task task) {} + + @Override + public void taskExecutionStarted(Task task, Boolean closeableInvoked) {} }); } // Need to run the task in a separate thread because node client's .execute() is blocked by our task listener @@ -651,6 +654,9 @@ public void waitForTaskCompletion(Task task) { 
waitForWaitingToStart.countDown(); } + @Override + public void taskExecutionStarted(Task task, Boolean closeableInvoked) {} + @Override public void onTaskRegistered(Task task) {} diff --git a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java index b7875c5f99774..df448d2665434 100644 --- a/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java +++ b/server/src/main/java/org/opensearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -42,6 +42,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskInfo; +import org.opensearch.tasks.TaskResourceTrackingService; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -60,8 +61,15 @@ public static long waitForCompletionTimeout(TimeValue timeout) { private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30); + private final TaskResourceTrackingService taskResourceTrackingService; + @Inject - public TransportListTasksAction(ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) { + public TransportListTasksAction( + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + TaskResourceTrackingService taskResourceTrackingService + ) { super( ListTasksAction.NAME, clusterService, @@ -72,6 +80,7 @@ public TransportListTasksAction(ClusterService clusterService, TransportService TaskInfo::new, ThreadPool.Names.MANAGEMENT ); + this.taskResourceTrackingService = taskResourceTrackingService; } @Override @@ -101,6 +110,8 @@ protected void processTasks(ListTasksRequest request, Consumer operation) } taskManager.waitForTaskCompletion(task, timeoutNanos); }); + } else { + operation = operation.andThen(taskResourceTrackingService::refreshResourceStats); } super.processTasks(request, operation); } diff --git a/server/src/main/java/org/opensearch/action/search/SearchShardTask.java b/server/src/main/java/org/opensearch/action/search/SearchShardTask.java index 2e506c6fe181b..f09701c7769eb 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchShardTask.java +++ b/server/src/main/java/org/opensearch/action/search/SearchShardTask.java @@ -49,6 +49,11 @@ public SearchShardTask(long id, String type, String action, String description, super(id, type, action, description, parentTaskId, headers); } + @Override + public boolean supportsResourceTracking() { + return true; + } + @Override public boolean shouldCancelChildrenOnCancellation() { return false; diff --git a/server/src/main/java/org/opensearch/action/search/SearchTask.java b/server/src/main/java/org/opensearch/action/search/SearchTask.java index 7f80f7836be6c..bf6f141a3e829 100644 --- a/server/src/main/java/org/opensearch/action/search/SearchTask.java +++ b/server/src/main/java/org/opensearch/action/search/SearchTask.java @@ -78,6 +78,11 @@ public final String getDescription() { return descriptionSupplier.get(); } + @Override + public boolean supportsResourceTracking() { + return true; + } + /** * Attach a {@link SearchProgressListener} to this task. 
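A minimal sketch of the read path this list-tasks change enables (the helper and its arguments are illustrative, not part of this patch): the handler refreshes live per-thread usage into a still-running, resource-aware task before its totals are read.

import org.opensearch.tasks.Task;
import org.opensearch.tasks.TaskResourceTrackingService;

final class TaskUsagePrinter {
    // Illustrative helper: pull current usage from active threads into the task, then read totals.
    static void printUsage(TaskResourceTrackingService trackingService, Task task) {
        trackingService.refreshResourceStats(task); // no-op when tracking is disabled or unsupported
        System.out.println(
            "task " + task.getId()
                + " cpu=" + task.getTotalResourceStats().getCpuTimeInNanos() + "ns"
                + " heap=" + task.getTotalResourceStats().getMemoryInBytes() + "B"
        );
    }
}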
*/ diff --git a/server/src/main/java/org/opensearch/action/support/TransportAction.java b/server/src/main/java/org/opensearch/action/support/TransportAction.java index 84ece8cfec530..83fca715c7e28 100644 --- a/server/src/main/java/org/opensearch/action/support/TransportAction.java +++ b/server/src/main/java/org/opensearch/action/support/TransportAction.java @@ -40,6 +40,7 @@ import org.opensearch.action.ActionResponse; import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; +import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskCancelledException; import org.opensearch.tasks.TaskId; @@ -88,31 +89,39 @@ public final Task execute(Request request, ActionListener listener) { */ final Releasable unregisterChildNode = registerChildNode(request.getParentTask()); final Task task; + try { task = taskManager.register("transport", actionName, request); } catch (TaskCancelledException e) { unregisterChildNode.close(); throw e; } - execute(task, request, new ActionListener() { - @Override - public void onResponse(Response response) { - try { - Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); - } finally { - listener.onResponse(response); + + ThreadContext.StoredContext storedContext = taskManager.taskExecutionStarted(task); + try { + execute(task, request, new ActionListener() { + @Override + public void onResponse(Response response) { + try { + Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); + } finally { + listener.onResponse(response); + } } - } - @Override - public void onFailure(Exception e) { - try { - Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); - } finally { - listener.onFailure(e); + @Override + public void onFailure(Exception e) { + try { + Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); + } finally { + listener.onFailure(e); + } } - } - }); + }); + } finally { + storedContext.close(); + } + return task; } @@ -129,25 +138,30 @@ public final Task execute(Request request, TaskListener listener) { unregisterChildNode.close(); throw e; } - execute(task, request, new ActionListener() { - @Override - public void onResponse(Response response) { - try { - Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); - } finally { - listener.onResponse(task, response); + ThreadContext.StoredContext storedContext = taskManager.taskExecutionStarted(task); + try { + execute(task, request, new ActionListener() { + @Override + public void onResponse(Response response) { + try { + Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); + } finally { + listener.onResponse(task, response); + } } - } - @Override - public void onFailure(Exception e) { - try { - Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); - } finally { - listener.onFailure(task, e); + @Override + public void onFailure(Exception e) { + try { + Releasables.close(unregisterChildNode, () -> taskManager.unregister(task)); + } finally { + listener.onFailure(task, e); + } } - } - }); + }); + } finally { + storedContext.close(); + } return task; } diff --git a/server/src/main/java/org/opensearch/cluster/ClusterModule.java b/server/src/main/java/org/opensearch/cluster/ClusterModule.java index c85691b80d7c3..b9f3a2a99f0b7 100644 --- a/server/src/main/java/org/opensearch/cluster/ClusterModule.java +++ b/server/src/main/java/org/opensearch/cluster/ClusterModule.java @@ 
-94,6 +94,7 @@ import org.opensearch.script.ScriptMetadata; import org.opensearch.snapshots.SnapshotsInfoService; import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskResourceTrackingService; import org.opensearch.tasks.TaskResultsService; import java.util.ArrayList; @@ -394,6 +395,7 @@ protected void configure() { bind(NodeMappingRefreshAction.class).asEagerSingleton(); bind(MappingUpdatedAction.class).asEagerSingleton(); bind(TaskResultsService.class).asEagerSingleton(); + bind(TaskResourceTrackingService.class).asEagerSingleton(); bind(AllocationDeciders.class).toInstance(allocationDeciders); bind(ShardsAllocator.class).toInstance(shardsAllocator); } diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index c758b7d2918e7..4cacc3bcf37eb 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -40,6 +40,7 @@ import org.opensearch.index.ShardIndexingPressureMemoryManager; import org.opensearch.index.ShardIndexingPressureSettings; import org.opensearch.index.ShardIndexingPressureStore; +import org.opensearch.tasks.TaskResourceTrackingService; import org.opensearch.watcher.ResourceWatcherService; import org.opensearch.action.admin.cluster.configuration.TransportAddVotingConfigExclusionsAction; import org.opensearch.action.admin.indices.close.TransportCloseIndexAction; @@ -568,7 +569,8 @@ public void apply(Settings value, Settings current, Settings previous) { ShardIndexingPressureMemoryManager.THROUGHPUT_DEGRADATION_LIMITS, ShardIndexingPressureMemoryManager.SUCCESSFUL_REQUEST_ELAPSED_TIMEOUT, ShardIndexingPressureMemoryManager.MAX_OUTSTANDING_REQUESTS, - IndexingPressure.MAX_INDEXING_BYTES + IndexingPressure.MAX_INDEXING_BYTES, + TaskResourceTrackingService.TASK_RESOURCE_TRACKING_ENABLED ) ) ); diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java index 5a967528a6ae2..9e28bb2b795c3 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/OpenSearchExecutors.java @@ -40,6 +40,8 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.node.Node; +import org.opensearch.threadpool.RunnableTaskExecutionListener; +import org.opensearch.threadpool.TaskAwareRunnable; import java.util.List; import java.util.Optional; @@ -55,6 +57,7 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; public class OpenSearchExecutors { @@ -172,14 +175,39 @@ public static OpenSearchThreadPoolExecutor newFixed( ); } + public static OpenSearchThreadPoolExecutor newAutoQueueFixed( + String name, + int size, + int initialQueueCapacity, + int minQueueSize, + int maxQueueSize, + int frameSize, + TimeValue targetedResponseTime, + ThreadFactory threadFactory, + ThreadContext contextHolder + ) { + return newAutoQueueFixed( + name, + size, + initialQueueCapacity, + minQueueSize, + maxQueueSize, + frameSize, + targetedResponseTime, + threadFactory, + contextHolder, + null + ); + } + /** * Return a new executor that will automatically 
adjust the queue size based on queue throughput. * - * @param size number of fixed threads to use for executing tasks + * @param size number of fixed threads to use for executing tasks * @param initialQueueCapacity initial size of the executor queue - * @param minQueueSize minimum queue size that the queue can be adjusted to - * @param maxQueueSize maximum queue size that the queue can be adjusted to - * @param frameSize number of tasks during which stats are collected before adjusting queue size + * @param minQueueSize minimum queue size that the queue can be adjusted to + * @param maxQueueSize maximum queue size that the queue can be adjusted to + * @param frameSize number of tasks during which stats are collected before adjusting queue size */ public static OpenSearchThreadPoolExecutor newAutoQueueFixed( String name, @@ -190,7 +218,8 @@ public static OpenSearchThreadPoolExecutor newAutoQueueFixed( int frameSize, TimeValue targetedResponseTime, ThreadFactory threadFactory, - ThreadContext contextHolder + ThreadContext contextHolder, + AtomicReference runnableTaskListener ) { if (initialQueueCapacity <= 0) { throw new IllegalArgumentException( @@ -201,6 +230,17 @@ public static OpenSearchThreadPoolExecutor newAutoQueueFixed( ConcurrentCollections.newBlockingQueue(), initialQueueCapacity ); + + Function runnableWrapper; + if (runnableTaskListener != null) { + runnableWrapper = (runnable) -> { + TaskAwareRunnable taskAwareRunnable = new TaskAwareRunnable(contextHolder, runnable, runnableTaskListener); + return new TimedRunnable(taskAwareRunnable); + }; + } else { + runnableWrapper = TimedRunnable::new; + } + return new QueueResizingOpenSearchThreadPoolExecutor( name, size, @@ -210,7 +250,7 @@ public static OpenSearchThreadPoolExecutor newAutoQueueFixed( queue, minQueueSize, maxQueueSize, - TimedRunnable::new, + runnableWrapper, frameSize, targetedResponseTime, threadFactory, diff --git a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java index d844a8f158ea4..35d7d925ce106 100644 --- a/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/opensearch/common/util/concurrent/ThreadContext.java @@ -66,6 +66,7 @@ import static org.opensearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_COUNT; import static org.opensearch.http.HttpTransportSettings.SETTING_HTTP_MAX_WARNING_HEADER_SIZE; +import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; /** * A ThreadContext is a map of string headers and a transient map of keyed objects that are associated with @@ -134,16 +135,23 @@ public StoredContext stashContext() { * This is needed so the DeprecationLogger in another thread can see the value of X-Opaque-ID provided by a user. * Otherwise when context is stash, it should be empty. 
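A small sketch of the stash behavior described above, assuming an available ThreadPool and that neither transient key is already set: the task id (keyed by TaskResourceTrackingService.TASK_ID) survives stashContext(), while ordinary transients are held back until the stored context is closed.

import org.opensearch.common.util.concurrent.ThreadContext;
import org.opensearch.threadpool.ThreadPool;

import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID;

final class StashSketch {
    static void demo(ThreadPool threadPool) {
        ThreadContext ctx = threadPool.getThreadContext();
        ctx.putTransient(TASK_ID, 42L);
        ctx.putTransient("scratch", "value");
        try (ThreadContext.StoredContext restore = ctx.stashContext()) {
            Long taskId = ctx.getTransient(TASK_ID);      // still 42L: preserved across the stash
            Object scratch = ctx.getTransient("scratch"); // null: only comes back when 'restore' closes
        }
        // leaving the try block restores the original headers and transients
    }
}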
*/ + + ThreadContextStruct threadContextStruct = DEFAULT_CONTEXT; + if (context.requestHeaders.containsKey(Task.X_OPAQUE_ID)) { - ThreadContextStruct threadContextStruct = DEFAULT_CONTEXT.putHeaders( + threadContextStruct = threadContextStruct.putHeaders( MapBuilder.newMapBuilder() .put(Task.X_OPAQUE_ID, context.requestHeaders.get(Task.X_OPAQUE_ID)) .immutableMap() ); - threadLocal.set(threadContextStruct); - } else { - threadLocal.set(DEFAULT_CONTEXT); } + + if (context.transientHeaders.containsKey(TASK_ID)) { + threadContextStruct = threadContextStruct.putTransient(TASK_ID, context.transientHeaders.get(TASK_ID)); + } + + threadLocal.set(threadContextStruct); + return () -> { // If the node and thus the threadLocal get closed while this task // is still executing, we don't want this runnable to fail with an diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 46400e5c8d269..c929c7c013b13 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -37,6 +37,8 @@ import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.opensearch.index.IndexingPressureService; +import org.opensearch.tasks.TaskResourceTrackingService; +import org.opensearch.threadpool.RunnableTaskExecutionListener; import org.opensearch.watcher.ResourceWatcherService; import org.opensearch.Assertions; import org.opensearch.Build; @@ -213,6 +215,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.function.UnaryOperator; import java.util.stream.Collectors; @@ -324,6 +327,7 @@ public static class DiscoverySettings { private final LocalNodeFactory localNodeFactory; private final NodeService nodeService; final NamedWriteableRegistry namedWriteableRegistry; + private final AtomicReference runnableTaskListener; public Node(Environment environment) { this(environment, Collections.emptyList(), true); @@ -433,7 +437,8 @@ protected Node( final List> executorBuilders = pluginsService.getExecutorBuilders(settings); - final ThreadPool threadPool = new ThreadPool(settings, executorBuilders.toArray(new ExecutorBuilder[0])); + runnableTaskListener = new AtomicReference<>(); + final ThreadPool threadPool = new ThreadPool(settings, runnableTaskListener, executorBuilders.toArray(new ExecutorBuilder[0])); resourcesToClose.add(() -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS)); final ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, threadPool); resourcesToClose.add(resourceWatcherService); @@ -1057,6 +1062,11 @@ public Node start() throws NodeValidationException { TransportService transportService = injector.getInstance(TransportService.class); transportService.getTaskManager().setTaskResultsService(injector.getInstance(TaskResultsService.class)); transportService.getTaskManager().setTaskCancellationService(new TaskCancellationService(transportService)); + + TaskResourceTrackingService taskResourceTrackingService = injector.getInstance(TaskResourceTrackingService.class); + transportService.getTaskManager().setTaskResourceTrackingService(taskResourceTrackingService); + runnableTaskListener.set(taskResourceTrackingService); + transportService.start(); assert localNodeFactory.getNode() != null; assert 
transportService.getLocalNode().equals(localNodeFactory.getNode()) @@ -1490,4 +1500,5 @@ DiscoveryNode getNode() { return localNode.get(); } } + } diff --git a/server/src/main/java/org/opensearch/tasks/Task.java b/server/src/main/java/org/opensearch/tasks/Task.java index 62453d08724ce..a51af17ae8ea2 100644 --- a/server/src/main/java/org/opensearch/tasks/Task.java +++ b/server/src/main/java/org/opensearch/tasks/Task.java @@ -32,8 +32,6 @@ package org.opensearch.tasks; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionResponse; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.io.stream.NamedWriteable; @@ -53,8 +51,6 @@ */ public class Task { - private static final Logger logger = LogManager.getLogger(Task.class); - /** * The request header to mark tasks with specific ids */ @@ -289,7 +285,7 @@ public void startThreadResourceTracking(long threadId, ResourceStatsType statsTy ); } } - threadResourceInfoList.add(new ThreadResourceInfo(statsType, resourceUsageMetrics)); + threadResourceInfoList.add(new ThreadResourceInfo(threadId, statsType, resourceUsageMetrics)); } /** @@ -336,6 +332,17 @@ public void stopThreadResourceTracking(long threadId, ResourceStatsType statsTyp throw new IllegalStateException("cannot update final values if active thread resource entry is not present"); } + /** + * Individual tasks can override this if they want to support task resource tracking. We just need to make sure that + * the ThreadPool on which the task runs on have runnable wrapper similar to + * {@link org.opensearch.common.util.concurrent.OpenSearchExecutors#newAutoQueueFixed} + * + * @return true if resource tracking is supported by the task + */ + public boolean supportsResourceTracking() { + return false; + } + /** * Report of the internal status of a task. 
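Opting in for any other task type mirrors the SearchTask and SearchShardTask overrides earlier in this patch; the class below is hypothetical, and tracking additionally requires the task's work to run on a thread pool whose runnables are wrapped as newAutoQueueFixed does above.

import java.util.Map;

import org.opensearch.tasks.Task;
import org.opensearch.tasks.TaskId;

// Hypothetical task type, shown only to illustrate the opt-in contract.
public class MyTrackedTask extends Task {
    public MyTrackedTask(long id, String type, String action, String description,
                         TaskId parentTaskId, Map<String, String> headers) {
        super(id, type, action, description, parentTaskId, headers);
    }

    @Override
    public boolean supportsResourceTracking() {
        return true; // opt in; the dynamic setting and JVM support are still checked at runtime
    }
}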
These can vary wildly from task * to task because each task is implemented differently but we should try diff --git a/server/src/main/java/org/opensearch/tasks/TaskManager.java b/server/src/main/java/org/opensearch/tasks/TaskManager.java index 1f6169768f245..37c10dfc0e6ab 100644 --- a/server/src/main/java/org/opensearch/tasks/TaskManager.java +++ b/server/src/main/java/org/opensearch/tasks/TaskManager.java @@ -89,7 +89,9 @@ public class TaskManager implements ClusterStateApplier { private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100); - /** Rest headers that are copied to the task */ + /** + * Rest headers that are copied to the task + */ private final List taskHeaders; private final ThreadPool threadPool; @@ -103,6 +105,7 @@ public class TaskManager implements ClusterStateApplier { private final Map banedParents = new ConcurrentHashMap<>(); private TaskResultsService taskResultsService; + private final SetOnce taskResourceTrackingService = new SetOnce<>(); private volatile DiscoveryNodes lastDiscoveryNodes = DiscoveryNodes.EMPTY_NODES; @@ -125,6 +128,10 @@ public void setTaskCancellationService(TaskCancellationService taskCancellationS this.cancellationService.set(taskCancellationService); } + public void setTaskResourceTrackingService(TaskResourceTrackingService taskResourceTrackingService) { + this.taskResourceTrackingService.set(taskResourceTrackingService); + } + /** * Registers a task without parent task */ @@ -202,6 +209,11 @@ public void cancel(CancellableTask task, String reason, Runnable listener) { */ public Task unregister(Task task) { logger.trace("unregister task for id: {}", task.getId()); + + if (taskResourceTrackingService.get() != null && task.supportsResourceTracking()) { + taskResourceTrackingService.get().stopTracking(task); + } + if (task instanceof CancellableTask) { CancellableTaskHolder holder = cancellableTasks.remove(task.getId()); if (holder != null) { @@ -361,6 +373,7 @@ public int getBanCount() { * Bans all tasks with the specified parent task from execution, cancels all tasks that are currently executing. *
<p>
* This method is called when a parent task that has children is cancelled. + * * @return a list of pending cancellable child tasks */ public List setBan(TaskId parentTaskId, String reason) { @@ -448,6 +461,18 @@ public void waitForTaskCompletion(Task task, long untilInNanos) { throw new OpenSearchTimeoutException("Timed out waiting for completion of [{}]", task); } + /** + * Takes actions when a task is registered and its execution starts + * + * @param task getting executed. + * @return AutoCloseable to free up resources (clean up thread context) when task execution block returns + */ + public ThreadContext.StoredContext taskExecutionStarted(Task task) { + if (taskResourceTrackingService.get() == null) return () -> {}; + + return taskResourceTrackingService.get().startTracking(task); + } + private static class CancellableTaskHolder { private final CancellableTask task; private boolean finished = false; diff --git a/server/src/main/java/org/opensearch/tasks/TaskResourceTrackingService.java b/server/src/main/java/org/opensearch/tasks/TaskResourceTrackingService.java new file mode 100644 index 0000000000000..71b829e023385 --- /dev/null +++ b/server/src/main/java/org/opensearch/tasks/TaskResourceTrackingService.java @@ -0,0 +1,255 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.tasks; + +import com.sun.management.ThreadMXBean; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.inject.Inject; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Setting; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ConcurrentCollections; +import org.opensearch.common.util.concurrent.ConcurrentMapLong; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.threadpool.RunnableTaskExecutionListener; +import org.opensearch.threadpool.ThreadPool; + +import java.lang.management.ManagementFactory; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static org.opensearch.tasks.ResourceStatsType.WORKER_STATS; + +/** + * Service that helps track resource usage of tasks running on a node. 
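Taken together, the TaskManager hooks above give every transport action the same shape, which TransportAction and RequestHandlerRegistry follow later in this patch; taskManager and request are assumed to exist, and the action name is illustrative.

import org.opensearch.common.util.concurrent.ThreadContext;
import org.opensearch.tasks.Task;
import org.opensearch.tasks.TaskAwareRequest;
import org.opensearch.tasks.TaskManager;

final class TrackedExecutionSketch {
    static void run(TaskManager taskManager, TaskAwareRequest request) {
        Task task = taskManager.register("transport", "internal:example/action", request);
        ThreadContext.StoredContext storedContext = taskManager.taskExecutionStarted(task);
        try {
            // execute the action; worker threads now see TASK_ID in the ThreadContext
        } finally {
            storedContext.close(); // restore the ThreadContext captured before the task id was added
        }
        taskManager.unregister(task); // also stops resource tracking for resource-aware tasks
    }
}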
+ */ +@SuppressForbidden(reason = "ThreadMXBean#getThreadAllocatedBytes") +public class TaskResourceTrackingService implements RunnableTaskExecutionListener { + + private static final Logger logger = LogManager.getLogger(TaskManager.class); + + public static final Setting TASK_RESOURCE_TRACKING_ENABLED = Setting.boolSetting( + "task_resource_tracking.enabled", + true, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + public static final String TASK_ID = "TASK_ID"; + + private static final ThreadMXBean threadMXBean = (ThreadMXBean) ManagementFactory.getThreadMXBean(); + + private final ConcurrentMapLong resourceAwareTasks = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency(); + private final ThreadPool threadPool; + private volatile boolean taskResourceTrackingEnabled; + + @Inject + public TaskResourceTrackingService(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool) { + this.taskResourceTrackingEnabled = TASK_RESOURCE_TRACKING_ENABLED.get(settings); + this.threadPool = threadPool; + clusterSettings.addSettingsUpdateConsumer(TASK_RESOURCE_TRACKING_ENABLED, this::setTaskResourceTrackingEnabled); + } + + public void setTaskResourceTrackingEnabled(boolean taskResourceTrackingEnabled) { + this.taskResourceTrackingEnabled = taskResourceTrackingEnabled; + } + + public boolean isTaskResourceTrackingEnabled() { + return taskResourceTrackingEnabled; + } + + public boolean isTaskResourceTrackingSupported() { + return threadMXBean.isThreadAllocatedMemorySupported() && threadMXBean.isThreadAllocatedMemoryEnabled(); + } + + /** + * Executes logic only if task supports resource tracking and resource tracking setting is enabled. + *
* <p>
+ * 1. Starts tracking the task in map of resourceAwareTasks. + * 2. Adds Task Id in thread context to make sure it's available while task is processed across multiple threads. + * + * @param task for which resources needs to be tracked + * @return Autocloseable stored context to restore ThreadContext to the state before this method changed it. + */ + public ThreadContext.StoredContext startTracking(Task task) { + if (task.supportsResourceTracking() == false + || isTaskResourceTrackingEnabled() == false + || isTaskResourceTrackingSupported() == false) { + return () -> {}; + } + + logger.debug("Starting resource tracking for task: {}", task.getId()); + resourceAwareTasks.put(task.getId(), task); + return addTaskIdToThreadContext(task); + } + + /** + * Stops tracking task registered earlier for tracking. + *
* <p>
+ * It doesn't have a feature-enabled check, to avoid issues if the setting was disabled while the task was in progress. +
* <p>
+ * It's also responsible to stop tracking the current thread's resources against this task if not already done. + * This happens when the thread executing the request logic itself calls the unregister method. So in this case unregister + * happens before runnable finishes. + * + * @param task task which has finished and doesn't need resource tracking. + */ + public void stopTracking(Task task) { + logger.debug("Stopping resource tracking for task: {}", task.getId()); + try { + if (isCurrentThreadWorkingOnTask(task)) { + taskExecutionFinishedOnThread(task.getId(), Thread.currentThread().getId()); + } + + List threadsWorkingOnTask = getThreadsWorkingOnTask(task); + if (threadsWorkingOnTask.size() > 0) { + logger.warn("No thread should be active when task finishes. Active threads: {}", threadsWorkingOnTask); + assert false : "No thread should be marked active when task finishes"; + } + } catch (Exception e) { + logger.warn("Failed while trying to mark the task execution on current thread completed.", e); + assert false; + } finally { + resourceAwareTasks.remove(task.getId()); + } + } + + /** + * Refreshes the resource stats for the tasks provided by looking into which threads are actively working on these + * and how much resources these have consumed till now. + * + * @param tasks for which resource stats needs to be refreshed. + */ + public void refreshResourceStats(Task... tasks) { + if (isTaskResourceTrackingEnabled() == false || isTaskResourceTrackingSupported() == false) { + return; + } + + for (Task task : tasks) { + if (task.supportsResourceTracking() && resourceAwareTasks.containsKey(task.getId())) { + refreshResourceStats(task); + } + } + } + + private void refreshResourceStats(Task resourceAwareTask) { + try { + logger.debug("Refreshing resource stats for Task: {}", resourceAwareTask.getId()); + List threadsWorkingOnTask = getThreadsWorkingOnTask(resourceAwareTask); + threadsWorkingOnTask.forEach( + threadId -> resourceAwareTask.updateThreadResourceStats(threadId, WORKER_STATS, getResourceUsageMetricsForThread(threadId)) + ); + } catch (IllegalStateException e) { + logger.debug("Resource stats already updated."); + } + + } + + /** + * Called when a thread starts working on a task's runnable. + * + * @param taskId of the task for which runnable is starting + * @param threadId of the thread which will be executing the runnable and we need to check resource usage for this + * thread + */ + @Override + public void taskExecutionStartedOnThread(long taskId, long threadId) { + try { + if (resourceAwareTasks.containsKey(taskId)) { + logger.debug("Task execution started on thread. Task: {}, Thread: {}", taskId, threadId); + + resourceAwareTasks.get(taskId) + .startThreadResourceTracking(threadId, WORKER_STATS, getResourceUsageMetricsForThread(threadId)); + } + } catch (Exception e) { + logger.warn(new ParameterizedMessage("Failed to mark thread execution started for task: [{}]", taskId), e); + assert false; + } + + } + + /** + * Called when a thread finishes working on a task's runnable. + * + * @param taskId of the task for which runnable is complete + * @param threadId of the thread which executed the runnable and we need to check resource usage for this thread + */ + @Override + public void taskExecutionFinishedOnThread(long taskId, long threadId) { + try { + if (resourceAwareTasks.containsKey(taskId)) { + logger.debug("Task execution finished on thread. 
Task: {}, Thread: {}", taskId, threadId); + resourceAwareTasks.get(taskId) + .stopThreadResourceTracking(threadId, WORKER_STATS, getResourceUsageMetricsForThread(threadId)); + } + } catch (Exception e) { + logger.warn(new ParameterizedMessage("Failed to mark thread execution finished for task: [{}]", taskId), e); + assert false; + } + } + + public Map getResourceAwareTasks() { + return Collections.unmodifiableMap(resourceAwareTasks); + } + + private ResourceUsageMetric[] getResourceUsageMetricsForThread(long threadId) { + ResourceUsageMetric currentMemoryUsage = new ResourceUsageMetric( + ResourceStats.MEMORY, + threadMXBean.getThreadAllocatedBytes(threadId) + ); + ResourceUsageMetric currentCPUUsage = new ResourceUsageMetric(ResourceStats.CPU, threadMXBean.getThreadCpuTime(threadId)); + return new ResourceUsageMetric[] { currentMemoryUsage, currentCPUUsage }; + } + + private boolean isCurrentThreadWorkingOnTask(Task task) { + long threadId = Thread.currentThread().getId(); + List threadResourceInfos = task.getResourceStats().getOrDefault(threadId, Collections.emptyList()); + + for (ThreadResourceInfo threadResourceInfo : threadResourceInfos) { + if (threadResourceInfo.isActive()) { + return true; + } + } + return false; + } + + private List getThreadsWorkingOnTask(Task task) { + List activeThreads = new ArrayList<>(); + for (List threadResourceInfos : task.getResourceStats().values()) { + for (ThreadResourceInfo threadResourceInfo : threadResourceInfos) { + if (threadResourceInfo.isActive()) { + activeThreads.add(threadResourceInfo.getThreadId()); + } + } + } + return activeThreads; + } + + /** + * Adds Task Id in the ThreadContext. + *
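The numbers fed into getResourceUsageMetricsForThread above come straight from the JVM; below is a standalone sketch of the same sampling, assuming a HotSpot JVM where the com.sun.management extension of ThreadMXBean is available and thread allocation measurement is enabled.

import java.lang.management.ManagementFactory;

import com.sun.management.ThreadMXBean;

public class ThreadUsageProbe {
    public static void main(String[] args) {
        ThreadMXBean bean = (ThreadMXBean) ManagementFactory.getThreadMXBean();
        long tid = Thread.currentThread().getId();
        if (bean.isThreadAllocatedMemorySupported() && bean.isThreadAllocatedMemoryEnabled()) {
            byte[] scratch = new byte[1 << 20]; // allocate ~1 MiB so the allocation counter moves
            System.out.println("cpu(ns)=" + bean.getThreadCpuTime(tid)
                + " allocated(bytes)=" + bean.getThreadAllocatedBytes(tid));
        }
    }
}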
* <p>
+ * Stashes the existing ThreadContext and preserves all the existing ThreadContext's data in the new ThreadContext + * as well. + * + * @param task for which Task Id needs to be added in ThreadContext. + * @return StoredContext reference to restore the ThreadContext from which we created a new one. + * Caller can call context.restore() to get the existing ThreadContext back. + */ + private ThreadContext.StoredContext addTaskIdToThreadContext(Task task) { + ThreadContext threadContext = threadPool.getThreadContext(); + ThreadContext.StoredContext storedContext = threadContext.newStoredContext(true, Collections.singletonList(TASK_ID)); + threadContext.putTransient(TASK_ID, task.getId()); + return storedContext; + } + +} diff --git a/server/src/main/java/org/opensearch/tasks/ThreadResourceInfo.java b/server/src/main/java/org/opensearch/tasks/ThreadResourceInfo.java index 8b45c38c8fb63..9ee683e3928f6 100644 --- a/server/src/main/java/org/opensearch/tasks/ThreadResourceInfo.java +++ b/server/src/main/java/org/opensearch/tasks/ThreadResourceInfo.java @@ -15,11 +15,13 @@ * for a specific stats type like worker_stats or response_stats etc., */ public class ThreadResourceInfo { + private final long threadId; private volatile boolean isActive = true; private final ResourceStatsType statsType; private final ResourceUsageInfo resourceUsageInfo; - public ThreadResourceInfo(ResourceStatsType statsType, ResourceUsageMetric... resourceUsageMetrics) { + public ThreadResourceInfo(long threadId, ResourceStatsType statsType, ResourceUsageMetric... resourceUsageMetrics) { + this.threadId = threadId; this.statsType = statsType; this.resourceUsageInfo = new ResourceUsageInfo(resourceUsageMetrics); } @@ -43,12 +45,16 @@ public ResourceStatsType getStatsType() { return statsType; } + public long getThreadId() { + return threadId; + } + public ResourceUsageInfo getResourceUsageInfo() { return resourceUsageInfo; } @Override public String toString() { - return resourceUsageInfo + ", stats_type=" + statsType + ", is_active=" + isActive; + return resourceUsageInfo + ", stats_type=" + statsType + ", is_active=" + isActive + ", threadId=" + threadId; } } diff --git a/server/src/main/java/org/opensearch/threadpool/AutoQueueAdjustingExecutorBuilder.java b/server/src/main/java/org/opensearch/threadpool/AutoQueueAdjustingExecutorBuilder.java index 2bac5eba9fc28..55b92c5d8bfcb 100644 --- a/server/src/main/java/org/opensearch/threadpool/AutoQueueAdjustingExecutorBuilder.java +++ b/server/src/main/java/org/opensearch/threadpool/AutoQueueAdjustingExecutorBuilder.java @@ -48,6 +48,7 @@ import java.util.Map; import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadFactory; +import java.util.concurrent.atomic.AtomicReference; /** * A builder for executors that automatically adjust the queue length as needed, depending on @@ -61,6 +62,7 @@ public final class AutoQueueAdjustingExecutorBuilder extends ExecutorBuilder maxQueueSizeSetting; private final Setting targetedResponseTimeSetting; private final Setting frameSizeSetting; + private final AtomicReference runnableTaskListener; AutoQueueAdjustingExecutorBuilder( final Settings settings, @@ -70,6 +72,19 @@ public final class AutoQueueAdjustingExecutorBuilder extends ExecutorBuilder runnableTaskListener ) { super(name); final String prefix = "thread_pool." 
+ name; @@ -184,6 +199,7 @@ public Iterator> settings() { Setting.Property.Deprecated, Setting.Property.Deprecated ); + this.runnableTaskListener = runnableTaskListener; } @Override @@ -230,7 +246,8 @@ ThreadPool.ExecutorHolder build(final AutoExecutorSettings settings, final Threa frameSize, targetedResponseTime, threadFactory, - threadContext + threadContext, + runnableTaskListener ); // TODO: in a subsequent change we hope to extend ThreadPool.Info to be more specific for the thread pool type final ThreadPool.Info info = new ThreadPool.Info( diff --git a/server/src/main/java/org/opensearch/threadpool/RunnableTaskExecutionListener.java b/server/src/main/java/org/opensearch/threadpool/RunnableTaskExecutionListener.java new file mode 100644 index 0000000000000..03cd66f80d044 --- /dev/null +++ b/server/src/main/java/org/opensearch/threadpool/RunnableTaskExecutionListener.java @@ -0,0 +1,33 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.threadpool; + +/** + * Listener for events when a runnable execution starts or finishes on a thread and is aware of the task for which the + * runnable is associated to. + */ +public interface RunnableTaskExecutionListener { + + /** + * Sends an update when ever a task's execution start on a thread + * + * @param taskId of task which has started + * @param threadId of thread which is executing the task + */ + void taskExecutionStartedOnThread(long taskId, long threadId); + + /** + * + * Sends an update when task execution finishes on a thread + * + * @param taskId of task which has finished + * @param threadId of thread which executed the task + */ + void taskExecutionFinishedOnThread(long taskId, long threadId); +} diff --git a/server/src/main/java/org/opensearch/threadpool/TaskAwareRunnable.java b/server/src/main/java/org/opensearch/threadpool/TaskAwareRunnable.java new file mode 100644 index 0000000000000..183b9b2f4cf9a --- /dev/null +++ b/server/src/main/java/org/opensearch/threadpool/TaskAwareRunnable.java @@ -0,0 +1,90 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.threadpool; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.opensearch.ExceptionsHelper; +import org.opensearch.common.util.concurrent.AbstractRunnable; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.common.util.concurrent.WrappedRunnable; +import org.opensearch.tasks.TaskManager; + +import java.util.Objects; +import java.util.concurrent.atomic.AtomicReference; + +import static java.lang.Thread.currentThread; +import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; + +/** + * Responsible for wrapping the original task's runnable and sending updates on when it starts and finishes to + * entities listening to the events. + * + * It's able to associate runnable with a task with the help of task Id available in thread context. 
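In the patch this wrapper is only constructed inside OpenSearchExecutors#newAutoQueueFixed; the direct construction below is a sketch of the moving parts, assuming a thread pool, a plain executor, and a TaskResourceTrackingService (which implements RunnableTaskExecutionListener).

import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicReference;

import org.opensearch.tasks.TaskResourceTrackingService;
import org.opensearch.threadpool.RunnableTaskExecutionListener;
import org.opensearch.threadpool.TaskAwareRunnable;
import org.opensearch.threadpool.ThreadPool;

final class WrapperSketch {
    static void submit(ThreadPool threadPool, ExecutorService executor, TaskResourceTrackingService trackingService) {
        AtomicReference<RunnableTaskExecutionListener> listenerRef = new AtomicReference<>(trackingService);
        Runnable work = () -> { /* task logic; TASK_ID is read from the ThreadContext */ };
        executor.execute(new TaskAwareRunnable(threadPool.getThreadContext(), work, listenerRef));
        // the start/finish callbacks fire around work.run() whenever TASK_ID is present
    }
}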
+ */ +public class TaskAwareRunnable extends AbstractRunnable implements WrappedRunnable { + + private static final Logger logger = LogManager.getLogger(TaskManager.class); + + private final Runnable original; + private final ThreadContext threadContext; + private final AtomicReference runnableTaskListener; + + public TaskAwareRunnable( + final ThreadContext threadContext, + final Runnable original, + final AtomicReference runnableTaskListener + ) { + this.original = original; + this.threadContext = threadContext; + this.runnableTaskListener = runnableTaskListener; + } + + @Override + public void onFailure(Exception e) { + ExceptionsHelper.reThrowIfNotNull(e); + } + + @Override + public boolean isForceExecution() { + return original instanceof AbstractRunnable && ((AbstractRunnable) original).isForceExecution(); + } + + @Override + public void onRejection(final Exception e) { + if (original instanceof AbstractRunnable) { + ((AbstractRunnable) original).onRejection(e); + } else { + ExceptionsHelper.reThrowIfNotNull(e); + } + } + + @Override + protected void doRun() throws Exception { + assert runnableTaskListener.get() != null : "Listener should be attached"; + Long taskId = threadContext.getTransient(TASK_ID); + if (Objects.nonNull(taskId)) { + runnableTaskListener.get().taskExecutionStartedOnThread(taskId, currentThread().getId()); + } else { + logger.debug("Task Id not available in thread context. Skipping update. Thread Info: {}", Thread.currentThread()); + } + try { + original.run(); + } finally { + if (Objects.nonNull(taskId)) { + runnableTaskListener.get().taskExecutionFinishedOnThread(taskId, currentThread().getId()); + } + } + } + + @Override + public Runnable unwrap() { + return original; + } +} diff --git a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java index c2530ccee5588..5e8f515f6c577 100644 --- a/server/src/main/java/org/opensearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/opensearch/threadpool/ThreadPool.java @@ -68,6 +68,7 @@ import java.util.concurrent.ScheduledThreadPoolExecutor; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; @@ -184,6 +185,14 @@ public Collection builders() { ); public ThreadPool(final Settings settings, final ExecutorBuilder... customBuilders) { + this(settings, null, customBuilders); + } + + public ThreadPool( + final Settings settings, + final AtomicReference runnableTaskListener, + final ExecutorBuilder... customBuilders + ) { assert Node.NODE_NAME_SETTING.exists(settings); final Map builders = new HashMap<>(); @@ -197,11 +206,20 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... 
customBui builders.put(Names.ANALYZE, new FixedExecutorBuilder(settings, Names.ANALYZE, 1, 16)); builders.put( Names.SEARCH, - new AutoQueueAdjustingExecutorBuilder(settings, Names.SEARCH, searchThreadPoolSize(allocatedProcessors), 1000, 1000, 1000, 2000) + new AutoQueueAdjustingExecutorBuilder( + settings, + Names.SEARCH, + searchThreadPoolSize(allocatedProcessors), + 1000, + 1000, + 1000, + 2000, + runnableTaskListener + ) ); builders.put( Names.SEARCH_THROTTLED, - new AutoQueueAdjustingExecutorBuilder(settings, Names.SEARCH_THROTTLED, 1, 100, 100, 100, 200) + new AutoQueueAdjustingExecutorBuilder(settings, Names.SEARCH_THROTTLED, 1, 100, 100, 100, 200, runnableTaskListener) ); builders.put(Names.MANAGEMENT, new ScalingExecutorBuilder(Names.MANAGEMENT, 1, 5, TimeValue.timeValueMinutes(5))); // no queue as this means clients will need to handle rejections on listener queue even if the operation succeeded diff --git a/server/src/main/java/org/opensearch/transport/RequestHandlerRegistry.java b/server/src/main/java/org/opensearch/transport/RequestHandlerRegistry.java index dcb021531f0ac..73be6e5b601e9 100644 --- a/server/src/main/java/org/opensearch/transport/RequestHandlerRegistry.java +++ b/server/src/main/java/org/opensearch/transport/RequestHandlerRegistry.java @@ -37,6 +37,7 @@ import org.opensearch.common.lease.Releasable; import org.opensearch.common.lease.Releasables; import org.opensearch.search.internal.ShardSearchRequest; +import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.tasks.CancellableTask; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskManager; @@ -81,6 +82,8 @@ public Request newRequest(StreamInput in) throws IOException { public void processMessageReceived(Request request, TransportChannel channel) throws Exception { final Task task = taskManager.register(channel.getChannelType(), action, request); + ThreadContext.StoredContext contextToRestore = taskManager.taskExecutionStarted(task); + Releasable unregisterTask = () -> taskManager.unregister(task); try { if (channel instanceof TcpTransportChannel && task instanceof CancellableTask) { @@ -99,6 +102,7 @@ public void processMessageReceived(Request request, TransportChannel channel) th unregisterTask = null; } finally { Releasables.close(unregisterTask); + contextToRestore.restore(); } } diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java index 7756eb12bb3f4..9bd44185baf24 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/RecordingTaskManagerListener.java @@ -75,6 +75,9 @@ public synchronized void onTaskUnregistered(Task task) { @Override public void waitForTaskCompletion(Task task) {} + @Override + public void taskExecutionStarted(Task task, Boolean closeableInvoked) {} + public synchronized List> getEvents() { return Collections.unmodifiableList(new ArrayList<>(events)); } diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java new file mode 100644 index 0000000000000..23877ac0b7395 --- /dev/null +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/ResourceAwareTasksTests.java @@ -0,0 +1,633 @@ +/* + * 
SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.action.admin.cluster.node.tasks; + +import com.sun.management.ThreadMXBean; +import org.opensearch.ExceptionsHelper; +import org.opensearch.action.ActionListener; +import org.opensearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; +import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksResponse; +import org.opensearch.action.support.ActionTestUtils; +import org.opensearch.action.support.nodes.BaseNodeRequest; +import org.opensearch.action.support.nodes.BaseNodesRequest; +import org.opensearch.cluster.service.ClusterService; +import org.opensearch.common.SuppressForbidden; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.AbstractRunnable; +import org.opensearch.tasks.CancellableTask; +import org.opensearch.tasks.Task; +import org.opensearch.tasks.TaskCancelledException; +import org.opensearch.tasks.TaskId; +import org.opensearch.tasks.TaskInfo; +import org.opensearch.test.tasks.MockTaskManager; +import org.opensearch.test.tasks.MockTaskManagerListener; +import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.TransportService; + +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; + +@SuppressForbidden(reason = "ThreadMXBean#getThreadAllocatedBytes") +public class ResourceAwareTasksTests extends TaskManagerTestCase { + + private static final ThreadMXBean threadMXBean = (ThreadMXBean) ManagementFactory.getThreadMXBean(); + + public static class ResourceAwareNodeRequest extends BaseNodeRequest { + protected String requestName; + + public ResourceAwareNodeRequest() { + super(); + } + + public ResourceAwareNodeRequest(StreamInput in) throws IOException { + super(in); + requestName = in.readString(); + } + + public ResourceAwareNodeRequest(NodesRequest request) { + requestName = request.requestName; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(requestName); + } + + @Override + public String getDescription() { + return "ResourceAwareNodeRequest[" + requestName + "]"; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers) { + @Override + public boolean shouldCancelChildrenOnCancellation() { + return false; + } + + @Override + public boolean supportsResourceTracking() { + return true; + } + }; + } + } + + public static class NodesRequest extends BaseNodesRequest { + private final String requestName; + + private NodesRequest(StreamInput in) throws IOException { + super(in); + requestName = 
in.readString(); + } + + public NodesRequest(String requestName, String... nodesIds) { + super(nodesIds); + this.requestName = requestName; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(requestName); + } + + @Override + public String getDescription() { + return "NodesRequest[" + requestName + "]"; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, getDescription(), parentTaskId, headers) { + @Override + public boolean shouldCancelChildrenOnCancellation() { + return true; + } + }; + } + } + + /** + * Simulates a task which executes work on search executor. + */ + class ResourceAwareNodesAction extends AbstractTestNodesAction { + private final TaskTestContext taskTestContext; + private final boolean blockForCancellation; + + ResourceAwareNodesAction( + String actionName, + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + boolean shouldBlock, + TaskTestContext taskTestContext + ) { + super(actionName, threadPool, clusterService, transportService, NodesRequest::new, ResourceAwareNodeRequest::new); + this.taskTestContext = taskTestContext; + this.blockForCancellation = shouldBlock; + } + + @Override + protected ResourceAwareNodeRequest newNodeRequest(NodesRequest request) { + return new ResourceAwareNodeRequest(request); + } + + @Override + protected NodeResponse nodeOperation(ResourceAwareNodeRequest request, Task task) { + assert task.supportsResourceTracking(); + + AtomicLong threadId = new AtomicLong(); + Future result = threadPool.executor(ThreadPool.Names.SEARCH).submit(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + ExceptionsHelper.reThrowIfNotNull(e); + } + + @Override + @SuppressForbidden(reason = "ThreadMXBean#getThreadAllocatedBytes") + protected void doRun() { + taskTestContext.memoryConsumptionWhenExecutionStarts = threadMXBean.getThreadAllocatedBytes( + Thread.currentThread().getId() + ); + threadId.set(Thread.currentThread().getId()); + + if (taskTestContext.operationStartValidator != null) { + try { + taskTestContext.operationStartValidator.accept(threadId.get()); + } catch (AssertionError error) { + throw new RuntimeException(error); + } + } + + Object[] allocation1 = new Object[1000000]; // 4MB + + if (blockForCancellation) { + // Simulate a job that takes forever to finish + // Using periodic checks method to identify that the task was cancelled + try { + boolean taskCancelled = waitUntil(((CancellableTask) task)::isCancelled); + if (taskCancelled) { + throw new TaskCancelledException("Task Cancelled"); + } else { + fail("It should have thrown an exception"); + } + } catch (InterruptedException ex) { + Thread.currentThread().interrupt(); + } + + } + + Object[] allocation2 = new Object[1000000]; // 4MB + } + }); + + try { + result.get(); + } catch (InterruptedException | ExecutionException e) { + throw new RuntimeException(e.getCause()); + } finally { + if (taskTestContext.operationFinishedValidator != null) { + taskTestContext.operationFinishedValidator.accept(threadId.get()); + } + } + + return new NodeResponse(clusterService.localNode()); + } + + @Override + protected NodeResponse nodeOperation(ResourceAwareNodeRequest request) { + throw new UnsupportedOperationException("the task parameter is required"); + } + } + + private TaskTestContext startResourceAwareNodesAction( + TestNode node, + boolean 
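// (Illustrative note, not part of the patch.) The "4MB" comments in nodeOperation above follow
// from the shape of the allocated arrays: assuming compressed oops (4-byte references) on a
// 64-bit JVM, an Object[1_000_000] costs roughly 1_000_000 * 4 bytes of reference slots plus a
// ~16-byte array header, i.e. about 4,000,016 bytes:
//
//     long perArray = 1_000_000L * Integer.BYTES + 16;   // ~4 MB per Object[1000000]
//
// The tests below compare against 4012688 bytes per array, presumably an empirically measured
// figure that also absorbs the runnable's incidental allocations, so the assertions stay
// deliberately loose.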
blockForCancellation, + TaskTestContext taskTestContext, + ActionListener listener + ) { + NodesRequest request = new NodesRequest("Test Request", node.getNodeId()); + + taskTestContext.requestCompleteLatch = new CountDownLatch(1); + + ResourceAwareNodesAction action = new ResourceAwareNodesAction( + "internal:resourceAction", + threadPool, + node.clusterService, + node.transportService, + blockForCancellation, + taskTestContext + ); + taskTestContext.mainTask = action.execute(request, listener); + return taskTestContext; + } + + private static class TaskTestContext { + private Task mainTask; + private CountDownLatch requestCompleteLatch; + private Consumer operationStartValidator; + private Consumer operationFinishedValidator; + private long memoryConsumptionWhenExecutionStarts; + } + + public void testBasicTaskResourceTracking() throws Exception { + setup(true, false); + + final AtomicReference throwableReference = new AtomicReference<>(); + final AtomicReference responseReference = new AtomicReference<>(); + TaskTestContext taskTestContext = new TaskTestContext(); + + Map resourceTasks = testNodes[0].taskResourceTrackingService.getResourceAwareTasks(); + + taskTestContext.operationStartValidator = threadId -> { + Task task = resourceTasks.values().stream().findAny().get(); + + // One thread is currently working on task but not finished + assertEquals(1, resourceTasks.size()); + assertEquals(1, task.getResourceStats().size()); + assertEquals(1, task.getResourceStats().get(threadId).size()); + assertTrue(task.getResourceStats().get(threadId).get(0).isActive()); + assertEquals(0, task.getTotalResourceStats().getCpuTimeInNanos()); + assertEquals(0, task.getTotalResourceStats().getMemoryInBytes()); + }; + + taskTestContext.operationFinishedValidator = threadId -> { + Task task = resourceTasks.values().stream().findAny().get(); + + // Thread has finished working on the task's runnable + assertEquals(1, resourceTasks.size()); + assertEquals(1, task.getResourceStats().size()); + assertEquals(1, task.getResourceStats().get(threadId).size()); + assertFalse(task.getResourceStats().get(threadId).get(0).isActive()); + + long expectedArrayAllocationOverhead = 2 * 4012688; // Task's memory overhead due to array allocations + long actualTaskMemoryOverhead = task.getTotalResourceStats().getMemoryInBytes(); + + assertTrue(actualTaskMemoryOverhead - expectedArrayAllocationOverhead < taskTestContext.memoryConsumptionWhenExecutionStarts); + assertTrue(task.getTotalResourceStats().getCpuTimeInNanos() > 0); + }; + + startResourceAwareNodesAction(testNodes[0], false, taskTestContext, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + taskTestContext.requestCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + taskTestContext.requestCompleteLatch.countDown(); + } + }); + + // Waiting for whole request to complete and return successfully till client + taskTestContext.requestCompleteLatch.await(); + + assertTasksRequestFinishedSuccessfully(resourceTasks.size(), responseReference.get(), throwableReference.get()); + } + + public void testTaskResourceTrackingDuringTaskCancellation() throws Exception { + setup(true, false); + + final AtomicReference throwableReference = new AtomicReference<>(); + final AtomicReference responseReference = new AtomicReference<>(); + TaskTestContext taskTestContext = new TaskTestContext(); + + Map resourceTasks = 
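// (Illustrative, not from the patch.) getResourceAwareTasks() exposes the live registry that
// TaskResourceTrackingService keeps for tasks whose supportsResourceTracking() returns true,
// conceptually a ConcurrentMap<Long, Task> that startTracking(task) populates and that is
// cleared again when the task stops being tracked on unregister; that is why the validators in
// these tests can assert a size of 1 while the node operation is running and 0 once the
// request has completed.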
testNodes[0].taskResourceTrackingService.getResourceAwareTasks(); + + taskTestContext.operationStartValidator = threadId -> { + Task task = resourceTasks.values().stream().findAny().get(); + + // One thread is currently working on task but not finished + assertEquals(1, resourceTasks.size()); + assertEquals(1, task.getResourceStats().size()); + assertEquals(1, task.getResourceStats().get(threadId).size()); + assertTrue(task.getResourceStats().get(threadId).get(0).isActive()); + assertEquals(0, task.getTotalResourceStats().getCpuTimeInNanos()); + assertEquals(0, task.getTotalResourceStats().getMemoryInBytes()); + }; + + taskTestContext.operationFinishedValidator = threadId -> { + Task task = resourceTasks.values().stream().findAny().get(); + + // Thread has finished working on the task's runnable + assertEquals(1, resourceTasks.size()); + assertEquals(1, task.getResourceStats().size()); + assertEquals(1, task.getResourceStats().get(threadId).size()); + assertFalse(task.getResourceStats().get(threadId).get(0).isActive()); + + // allocations are completed before the task is cancelled + long expectedArrayAllocationOverhead = 4012688; // Task's memory overhead due to array allocations + long taskCancellationOverhead = 30000; // Task cancellation overhead ~ 30Kb + long actualTaskMemoryOverhead = task.getTotalResourceStats().getMemoryInBytes(); + + long expectedOverhead = expectedArrayAllocationOverhead + taskCancellationOverhead; + assertTrue(actualTaskMemoryOverhead - expectedOverhead < taskTestContext.memoryConsumptionWhenExecutionStarts); + assertTrue(task.getTotalResourceStats().getCpuTimeInNanos() > 0); + }; + + startResourceAwareNodesAction(testNodes[0], true, taskTestContext, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + taskTestContext.requestCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + taskTestContext.requestCompleteLatch.countDown(); + } + }); + + // Cancel main task + CancelTasksRequest request = new CancelTasksRequest(); + request.setReason("Cancelling request to verify Task resource tracking behaviour"); + request.setTaskId(new TaskId(testNodes[0].getNodeId(), taskTestContext.mainTask.getId())); + ActionTestUtils.executeBlocking(testNodes[0].transportCancelTasksAction, request); + + // Waiting for whole request to complete and return successfully till client + taskTestContext.requestCompleteLatch.await(); + + assertEquals(0, resourceTasks.size()); + assertNull(throwableReference.get()); + assertNotNull(responseReference.get()); + assertEquals(1, responseReference.get().failureCount()); + assertEquals(TaskCancelledException.class, findActualException(responseReference.get().failures().get(0)).getClass()); + } + + public void testTaskResourceTrackingDisabled() throws Exception { + setup(false, false); + + final AtomicReference throwableReference = new AtomicReference<>(); + final AtomicReference responseReference = new AtomicReference<>(); + TaskTestContext taskTestContext = new TaskTestContext(); + + Map resourceTasks = testNodes[0].taskResourceTrackingService.getResourceAwareTasks(); + + taskTestContext.operationStartValidator = threadId -> { assertEquals(0, resourceTasks.size()); }; + + taskTestContext.operationFinishedValidator = threadId -> { assertEquals(0, resourceTasks.size()); }; + + startResourceAwareNodesAction(testNodes[0], false, taskTestContext, new ActionListener() { + @Override + public void 
onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + taskTestContext.requestCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + taskTestContext.requestCompleteLatch.countDown(); + } + }); + + // Waiting for whole request to complete and return successfully till client + taskTestContext.requestCompleteLatch.await(); + + assertTasksRequestFinishedSuccessfully(resourceTasks.size(), responseReference.get(), throwableReference.get()); + } + + public void testTaskResourceTrackingDisabledWhileTaskInProgress() throws Exception { + setup(true, false); + + final AtomicReference throwableReference = new AtomicReference<>(); + final AtomicReference responseReference = new AtomicReference<>(); + TaskTestContext taskTestContext = new TaskTestContext(); + + Map resourceTasks = testNodes[0].taskResourceTrackingService.getResourceAwareTasks(); + + taskTestContext.operationStartValidator = threadId -> { + Task task = resourceTasks.values().stream().findAny().get(); + // One thread is currently working on task but not finished + assertEquals(1, resourceTasks.size()); + assertEquals(1, task.getResourceStats().size()); + assertEquals(1, task.getResourceStats().get(threadId).size()); + assertTrue(task.getResourceStats().get(threadId).get(0).isActive()); + assertEquals(0, task.getTotalResourceStats().getCpuTimeInNanos()); + assertEquals(0, task.getTotalResourceStats().getMemoryInBytes()); + + testNodes[0].taskResourceTrackingService.setTaskResourceTrackingEnabled(false); + }; + + taskTestContext.operationFinishedValidator = threadId -> { + Task task = resourceTasks.values().stream().findAny().get(); + // Thread has finished working on the task's runnable + assertEquals(1, resourceTasks.size()); + assertEquals(1, task.getResourceStats().size()); + assertEquals(1, task.getResourceStats().get(threadId).size()); + assertFalse(task.getResourceStats().get(threadId).get(0).isActive()); + + long expectedArrayAllocationOverhead = 2 * 4012688; // Task's memory overhead due to array allocations + long actualTaskMemoryOverhead = task.getTotalResourceStats().getMemoryInBytes(); + + assertTrue(actualTaskMemoryOverhead - expectedArrayAllocationOverhead < taskTestContext.memoryConsumptionWhenExecutionStarts); + assertTrue(task.getTotalResourceStats().getCpuTimeInNanos() > 0); + }; + + startResourceAwareNodesAction(testNodes[0], false, taskTestContext, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + taskTestContext.requestCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + taskTestContext.requestCompleteLatch.countDown(); + } + }); + + // Waiting for whole request to complete and return successfully till client + taskTestContext.requestCompleteLatch.await(); + + assertTasksRequestFinishedSuccessfully(resourceTasks.size(), responseReference.get(), throwableReference.get()); + } + + public void testTaskResourceTrackingEnabledWhileTaskInProgress() throws Exception { + setup(false, false); + + final AtomicReference throwableReference = new AtomicReference<>(); + final AtomicReference responseReference = new AtomicReference<>(); + TaskTestContext taskTestContext = new TaskTestContext(); + + Map resourceTasks = testNodes[0].taskResourceTrackingService.getResourceAwareTasks(); + + taskTestContext.operationStartValidator = threadId -> { + assertEquals(0, resourceTasks.size()); + + 
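// (Illustrative note.) The flip below exercises the dynamic-setting path: the tracking decision
// is made once, when a task starts, so enabling the setting mid-flight must not retroactively
// register the already-running task (the finish validator still expects an empty registry). On
// a live cluster the same toggle would go through the cluster settings API, along the lines of:
//
//     ClusterUpdateSettingsRequest toggle = new ClusterUpdateSettingsRequest();
//     toggle.persistentSettings(Settings.builder().put("task_resource_tracking.enabled", true));
//     client.cluster().putSettings(toggle, RequestOptions.DEFAULT);   // high-level REST client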
testNodes[0].taskResourceTrackingService.setTaskResourceTrackingEnabled(true); + }; + + taskTestContext.operationFinishedValidator = threadId -> { assertEquals(0, resourceTasks.size()); }; + + startResourceAwareNodesAction(testNodes[0], false, taskTestContext, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + taskTestContext.requestCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + taskTestContext.requestCompleteLatch.countDown(); + } + }); + + // Waiting for whole request to complete and return successfully till client + taskTestContext.requestCompleteLatch.await(); + + assertTasksRequestFinishedSuccessfully(resourceTasks.size(), responseReference.get(), throwableReference.get()); + } + + public void testOnDemandRefreshWhileFetchingTasks() throws InterruptedException { + setup(true, false); + + final AtomicReference throwableReference = new AtomicReference<>(); + final AtomicReference responseReference = new AtomicReference<>(); + + TaskTestContext taskTestContext = new TaskTestContext(); + + Map resourceTasks = testNodes[0].taskResourceTrackingService.getResourceAwareTasks(); + + taskTestContext.operationStartValidator = threadId -> { + ListTasksResponse listTasksResponse = ActionTestUtils.executeBlocking( + testNodes[0].transportListTasksAction, + new ListTasksRequest().setActions("internal:resourceAction*").setDetailed(true) + ); + + TaskInfo taskInfo = listTasksResponse.getTasks().get(1); + + assertNotNull(taskInfo.getResourceStats()); + assertNotNull(taskInfo.getResourceStats().getResourceUsageInfo()); + assertTrue(taskInfo.getResourceStats().getResourceUsageInfo().get("total").getCpuTimeInNanos() > 0); + assertTrue(taskInfo.getResourceStats().getResourceUsageInfo().get("total").getMemoryInBytes() > 0); + }; + + startResourceAwareNodesAction(testNodes[0], false, taskTestContext, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + taskTestContext.requestCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + taskTestContext.requestCompleteLatch.countDown(); + } + }); + + // Waiting for whole request to complete and return successfully till client + taskTestContext.requestCompleteLatch.await(); + + assertTasksRequestFinishedSuccessfully(resourceTasks.size(), responseReference.get(), throwableReference.get()); + } + + public void testTaskIdPersistsInThreadContext() throws InterruptedException { + setup(true, true); + + final List taskIdsAddedToThreadContext = new ArrayList<>(); + final List taskIdsRemovedFromThreadContext = new ArrayList<>(); + AtomicLong actualTaskIdInThreadContext = new AtomicLong(-1); + AtomicLong expectedTaskIdInThreadContext = new AtomicLong(-2); + + ((MockTaskManager) testNodes[0].transportService.getTaskManager()).addListener(new MockTaskManagerListener() { + @Override + public void waitForTaskCompletion(Task task) {} + + @Override + public void taskExecutionStarted(Task task, Boolean closeableInvoked) { + if (closeableInvoked) { + taskIdsRemovedFromThreadContext.add(task.getId()); + } else { + taskIdsAddedToThreadContext.add(task.getId()); + } + } + + @Override + public void onTaskRegistered(Task task) {} + + @Override + public void onTaskUnregistered(Task task) { + if (task.getAction().equals("internal:resourceAction[n]")) { + 
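// (Illustrative.) This callback fires from taskManager.unregister(task), which
// RequestHandlerRegistry.processMessageReceived invokes before restoring the stored thread
// context (see the finally block in that class above), so the TASK_ID transient planted by
// taskExecutionStarted(task) is still visible on this thread and should match the id of the
// task being unregistered.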
expectedTaskIdInThreadContext.set(task.getId()); + actualTaskIdInThreadContext.set(threadPool.getThreadContext().getTransient(TASK_ID)); + } + } + }); + + TaskTestContext taskTestContext = new TaskTestContext(); + startResourceAwareNodesAction(testNodes[0], false, taskTestContext, new ActionListener() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + taskTestContext.requestCompleteLatch.countDown(); + } + + @Override + public void onFailure(Exception e) { + taskTestContext.requestCompleteLatch.countDown(); + } + }); + + taskTestContext.requestCompleteLatch.await(); + + assertEquals(expectedTaskIdInThreadContext.get(), actualTaskIdInThreadContext.get()); + assertThat(taskIdsAddedToThreadContext, containsInAnyOrder(taskIdsRemovedFromThreadContext.toArray())); + } + + private void setup(boolean resourceTrackingEnabled, boolean useMockTaskManager) { + Settings settings = Settings.builder() + .put("task_resource_tracking.enabled", resourceTrackingEnabled) + .put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), useMockTaskManager) + .build(); + setupTestNodes(settings); + connectNodes(testNodes[0]); + + runnableTaskListener.set(testNodes[0].taskResourceTrackingService); + } + + private Throwable findActualException(Exception e) { + Throwable throwable = e.getCause(); + while (throwable.getCause() != null) { + throwable = throwable.getCause(); + } + return throwable; + } + + private void assertTasksRequestFinishedSuccessfully(int activeResourceTasks, NodesResponse nodesResponse, Throwable throwable) { + assertEquals(0, activeResourceTasks); + assertNull(throwable); + assertNotNull(nodesResponse); + assertEquals(0, nodesResponse.failureCount()); + } + +} diff --git a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index c8411b31e0709..51fc5d80f2de3 100644 --- a/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/opensearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -59,8 +59,10 @@ import org.opensearch.indices.breaker.NoneCircuitBreakerService; import org.opensearch.tasks.TaskCancellationService; import org.opensearch.tasks.TaskManager; +import org.opensearch.tasks.TaskResourceTrackingService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.tasks.MockTaskManager; +import org.opensearch.threadpool.RunnableTaskExecutionListener; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -74,6 +76,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import static java.util.Collections.emptyMap; @@ -89,10 +92,12 @@ public abstract class TaskManagerTestCase extends OpenSearchTestCase { protected ThreadPool threadPool; protected TestNode[] testNodes; protected int nodesCount; + protected AtomicReference runnableTaskListener; @Before public void setupThreadPool() { - threadPool = new TestThreadPool(TransportTasksActionTests.class.getSimpleName()); + runnableTaskListener = new AtomicReference<>(); + threadPool = new TestThreadPool(TransportTasksActionTests.class.getSimpleName(), runnableTaskListener); } public void setupTestNodes(Settings settings) { @@ -225,14 +230,22 @@ protected TaskManager 
createTaskManager(Settings settings, ThreadPool threadPool transportService.start(); clusterService = createClusterService(threadPool, discoveryNode.get()); clusterService.addStateApplier(transportService.getTaskManager()); + taskResourceTrackingService = new TaskResourceTrackingService(settings, clusterService.getClusterSettings(), threadPool); + transportService.getTaskManager().setTaskResourceTrackingService(taskResourceTrackingService); ActionFilters actionFilters = new ActionFilters(emptySet()); - transportListTasksAction = new TransportListTasksAction(clusterService, transportService, actionFilters); + transportListTasksAction = new TransportListTasksAction( + clusterService, + transportService, + actionFilters, + taskResourceTrackingService + ); transportCancelTasksAction = new TransportCancelTasksAction(clusterService, transportService, actionFilters); transportService.acceptIncomingRequests(); } public final ClusterService clusterService; public final TransportService transportService; + public final TaskResourceTrackingService taskResourceTrackingService; private final SetOnce discoveryNode = new SetOnce<>(); public final TransportListTasksAction transportListTasksAction; public final TransportCancelTasksAction transportCancelTasksAction; diff --git a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java index 4b98870422ce8..202f1b7dcb5b4 100644 --- a/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/opensearch/action/bulk/TransportBulkActionIngestTests.java @@ -91,6 +91,7 @@ import static java.util.Collections.emptyMap; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.sameInstance; +import static org.mockito.Answers.RETURNS_MOCKS; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.anyString; @@ -224,7 +225,7 @@ public void setupAction() { remoteResponseHandler = ArgumentCaptor.forClass(TransportResponseHandler.class); // setup services that will be called by action - transportService = mock(TransportService.class); + transportService = mock(TransportService.class, RETURNS_MOCKS); clusterService = mock(ClusterService.class); localIngest = true; // setup nodes for local and remote diff --git a/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java b/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java index 9c70accaca3e4..64286e47b4966 100644 --- a/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java +++ b/server/src/test/java/org/opensearch/common/util/concurrent/ThreadContextTests.java @@ -48,6 +48,7 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.sameInstance; +import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; public class ThreadContextTests extends OpenSearchTestCase { @@ -154,6 +155,15 @@ public void testNewContextWithClearedTransients() { assertEquals(1, threadContext.getResponseHeaders().get("baz").size()); } + public void testStashContextWithPreservedTransients() { + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + threadContext.putTransient("foo", "bar"); + threadContext.putTransient(TASK_ID, 1); + threadContext.stashContext(); + assertNull(threadContext.getTransient("foo")); + 
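// (Illustrative.) stashContext() normally hands back an empty context, which is why "foo" is
// gone above; the behavior under test is a narrow carve-out that copies the task id forward:
//
//     ThreadContext.StoredContext ctx = threadContext.stashContext();
//     threadContext.getTransient("foo");     // null - ordinary transients are dropped
//     threadContext.getTransient(TASK_ID);   // still set - preserved for resource tracking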
assertEquals(1, (int) threadContext.getTransient(TASK_ID)); + } + public void testStashWithOrigin() { final String origin = randomAlphaOfLengthBetween(4, 16); final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index a896aab0f70c9..5f303bc774930 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -198,6 +198,7 @@ import org.opensearch.search.fetch.FetchPhase; import org.opensearch.search.query.QueryPhase; import org.opensearch.snapshots.mockstore.MockEventuallyConsistentRepository; +import org.opensearch.tasks.TaskResourceTrackingService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.disruption.DisruptableMockTransport; import org.opensearch.threadpool.ThreadPool; @@ -1738,6 +1739,8 @@ public void onFailure(final Exception e) { final IndexNameExpressionResolver indexNameExpressionResolver = new IndexNameExpressionResolver( new ThreadContext(Settings.EMPTY) ); + transportService.getTaskManager() + .setTaskResourceTrackingService(new TaskResourceTrackingService(settings, clusterSettings, threadPool)); repositoriesService = new RepositoriesService( settings, clusterService, diff --git a/server/src/test/java/org/opensearch/tasks/TaskManagerTests.java b/server/src/test/java/org/opensearch/tasks/TaskManagerTests.java index 0f09b0de34206..ab49109eb8247 100644 --- a/server/src/test/java/org/opensearch/tasks/TaskManagerTests.java +++ b/server/src/test/java/org/opensearch/tasks/TaskManagerTests.java @@ -40,6 +40,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ConcurrentCollections; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.RunnableTaskExecutionListener; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.FakeTcpChannel; @@ -59,6 +60,7 @@ import java.util.Set; import java.util.concurrent.Phaser; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; @@ -67,10 +69,12 @@ public class TaskManagerTests extends OpenSearchTestCase { private ThreadPool threadPool; + private AtomicReference runnableTaskListener; @Before public void setupThreadPool() { - threadPool = new TestThreadPool(TransportTasksActionTests.class.getSimpleName()); + runnableTaskListener = new AtomicReference<>(); + threadPool = new TestThreadPool(TransportTasksActionTests.class.getSimpleName(), runnableTaskListener); } @After diff --git a/server/src/test/java/org/opensearch/tasks/TaskResourceTrackingServiceTests.java b/server/src/test/java/org/opensearch/tasks/TaskResourceTrackingServiceTests.java new file mode 100644 index 0000000000000..8ba23c5d3219c --- /dev/null +++ b/server/src/test/java/org/opensearch/tasks/TaskResourceTrackingServiceTests.java @@ -0,0 +1,97 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.tasks; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.action.admin.cluster.node.tasks.TransportTasksActionTests; +import org.opensearch.action.search.SearchTask; +import org.opensearch.common.settings.ClusterSettings; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; +import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.threadpool.TestThreadPool; +import org.opensearch.threadpool.ThreadPool; + +import java.util.HashMap; +import java.util.concurrent.atomic.AtomicReference; + +import static org.opensearch.tasks.ResourceStats.MEMORY; +import static org.opensearch.tasks.TaskResourceTrackingService.TASK_ID; + +public class TaskResourceTrackingServiceTests extends OpenSearchTestCase { + + private ThreadPool threadPool; + private TaskResourceTrackingService taskResourceTrackingService; + + @Before + public void setup() { + threadPool = new TestThreadPool(TransportTasksActionTests.class.getSimpleName(), new AtomicReference<>()); + taskResourceTrackingService = new TaskResourceTrackingService( + Settings.EMPTY, + new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS), + threadPool + ); + } + + @After + public void terminateThreadPool() { + terminate(threadPool); + } + + public void testThreadContextUpdateOnTrackingStart() { + taskResourceTrackingService.setTaskResourceTrackingEnabled(true); + + Task task = new SearchTask(1, "test", "test", () -> "Test", TaskId.EMPTY_TASK_ID, new HashMap<>()); + + String key = "KEY"; + String value = "VALUE"; + + // Prepare thread context + threadPool.getThreadContext().putHeader(key, value); + threadPool.getThreadContext().putTransient(key, value); + threadPool.getThreadContext().addResponseHeader(key, value); + + ThreadContext.StoredContext storedContext = taskResourceTrackingService.startTracking(task); + + // All headers should be preserved and Task Id should also be included in thread context + verifyThreadContextFixedHeaders(key, value); + assertEquals((long) threadPool.getThreadContext().getTransient(TASK_ID), task.getId()); + + storedContext.restore(); + + // Post restore only task id should be removed from the thread context + verifyThreadContextFixedHeaders(key, value); + assertNull(threadPool.getThreadContext().getTransient(TASK_ID)); + } + + public void testStopTrackingHandlesCurrentActiveThread() { + taskResourceTrackingService.setTaskResourceTrackingEnabled(true); + Task task = new SearchTask(1, "test", "test", () -> "Test", TaskId.EMPTY_TASK_ID, new HashMap<>()); + ThreadContext.StoredContext storedContext = taskResourceTrackingService.startTracking(task); + long threadId = Thread.currentThread().getId(); + taskResourceTrackingService.taskExecutionStartedOnThread(task.getId(), threadId); + + assertTrue(task.getResourceStats().get(threadId).get(0).isActive()); + assertEquals(0, task.getResourceStats().get(threadId).get(0).getResourceUsageInfo().getStatsInfo().get(MEMORY).getTotalValue()); + + taskResourceTrackingService.stopTracking(task); + + // Makes sure stop tracking marks the current active thread inactive and refreshes the resource stats before returning. 
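// (Illustrative.) Without the refresh-on-stop behavior the final reading would race with the
// caller: a thread's resource counters are only guaranteed to be current once
// stopTracking(task) has marked it inactive, which is what lets the assertions below rely on a
// non-zero MEMORY total without issuing an explicit refresh in between.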
+ assertFalse(task.getResourceStats().get(threadId).get(0).isActive()); + assertTrue(task.getResourceStats().get(threadId).get(0).getResourceUsageInfo().getStatsInfo().get(MEMORY).getTotalValue() > 0); + } + + private void verifyThreadContextFixedHeaders(String key, String value) { + assertEquals(threadPool.getThreadContext().getHeader(key), value); + assertEquals(threadPool.getThreadContext().getTransient(key), value); + assertEquals(threadPool.getThreadContext().getResponseHeaders().get(key).get(0), value); + } + +} diff --git a/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManager.java b/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManager.java index e60871f67ea54..677ec7a0a6600 100644 --- a/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManager.java +++ b/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManager.java @@ -39,6 +39,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Setting.Property; import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.tasks.Task; import org.opensearch.tasks.TaskAwareRequest; import org.opensearch.tasks.TaskManager; @@ -127,6 +128,21 @@ public void waitForTaskCompletion(Task task, long untilInNanos) { super.waitForTaskCompletion(task, untilInNanos); } + @Override + public ThreadContext.StoredContext taskExecutionStarted(Task task) { + for (MockTaskManagerListener listener : listeners) { + listener.taskExecutionStarted(task, false); + } + + ThreadContext.StoredContext storedContext = super.taskExecutionStarted(task); + return () -> { + for (MockTaskManagerListener listener : listeners) { + listener.taskExecutionStarted(task, true); + } + storedContext.restore(); + }; + } + public void addListener(MockTaskManagerListener listener) { listeners.add(listener); } diff --git a/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManagerListener.java b/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManagerListener.java index eb8361ac552fc..f15f878995aa2 100644 --- a/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManagerListener.java +++ b/test/framework/src/main/java/org/opensearch/test/tasks/MockTaskManagerListener.java @@ -43,4 +43,7 @@ public interface MockTaskManagerListener { void onTaskUnregistered(Task task); void waitForTaskCompletion(Task task); + + void taskExecutionStarted(Task task, Boolean closeableInvoked); + } diff --git a/test/framework/src/main/java/org/opensearch/threadpool/TestThreadPool.java b/test/framework/src/main/java/org/opensearch/threadpool/TestThreadPool.java index 5f8611d99f0a0..2d97d5bffee01 100644 --- a/test/framework/src/main/java/org/opensearch/threadpool/TestThreadPool.java +++ b/test/framework/src/main/java/org/opensearch/threadpool/TestThreadPool.java @@ -40,6 +40,7 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.ThreadFactory; import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.atomic.AtomicReference; public class TestThreadPool extends ThreadPool { @@ -47,12 +48,29 @@ public class TestThreadPool extends ThreadPool { private volatile boolean returnRejectingExecutor = false; private volatile ThreadPoolExecutor rejectingExecutor; + public TestThreadPool( + String name, + AtomicReference runnableTaskListener, + ExecutorBuilder... 
customBuilders + ) { + this(name, Settings.EMPTY, runnableTaskListener, customBuilders); + } + public TestThreadPool(String name, ExecutorBuilder... customBuilders) { this(name, Settings.EMPTY, customBuilders); } public TestThreadPool(String name, Settings settings, ExecutorBuilder... customBuilders) { - super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), customBuilders); + this(name, settings, null, customBuilders); + } + + public TestThreadPool( + String name, + Settings settings, + AtomicReference runnableTaskListener, + ExecutorBuilder... customBuilders + ) { + super(Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), name).put(settings).build(), runnableTaskListener, customBuilders); } @Override From 03fbca3f500d8541b4b32c1456997a8493ebe4f5 Mon Sep 17 00:00:00 2001 From: Peng Huo Date: Thu, 21 Apr 2022 07:06:33 -0700 Subject: [PATCH 37/41] Add new multi_term aggregation (#2687) Adds a new multi_term aggregation. The current implementation focuses on adding new type aggregates. Performance (latency) is suboptimal in this iteration, mainly because of brute force encoding/decoding a list of values into bucket keys. A performance improvement change will be made as a follow on. Signed-off-by: Peng Huo --- .../client/RestHighLevelClient.java | 3 + .../search.aggregation/370_multi_terms.yml | 620 ++++++++++++ .../aggregations/bucket/MultiTermsIT.java | 167 ++++ .../bucket/terms/BaseStringTermsTestCase.java | 256 +++++ .../bucket/terms/StringTermsIT.java | 239 +---- .../org/opensearch/search/SearchModule.java | 9 + .../aggregations/AggregationBuilders.java | 8 + .../bucket/terms/InternalMultiTerms.java | 440 +++++++++ .../bucket/terms/InternalTerms.java | 59 +- .../terms/MultiTermsAggregationBuilder.java | 443 +++++++++ .../terms/MultiTermsAggregationFactory.java | 163 ++++ .../bucket/terms/MultiTermsAggregator.java | 438 +++++++++ .../bucket/terms/ParsedMultiTerms.java | 77 ++ .../bucket/terms/ParsedTerms.java | 7 +- .../BaseMultiValuesSourceFieldConfig.java | 216 +++++ .../support/MultiTermsValuesSourceConfig.java | 203 ++++ .../support/MultiValuesSourceFieldConfig.java | 160 +-- .../aggregations/AggregationsTests.java | 2 + .../bucket/terms/InternalMultiTermsTests.java | 116 +++ .../MultiTermsAggregationBuilderTests.java | 182 ++++ .../terms/MultiTermsAggregatorTests.java | 909 ++++++++++++++++++ .../MultiTermsValuesSourceConfigTests.java | 65 ++ .../test/InternalAggregationTestCase.java | 3 + 23 files changed, 4378 insertions(+), 407 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/370_multi_terms.yml create mode 100644 server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java create mode 100644 server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java create mode 100644 server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTerms.java create mode 100644 server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilder.java create mode 100644 server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationFactory.java create mode 100644 server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregator.java create mode 100644 server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedMultiTerms.java create mode 100644 
server/src/main/java/org/opensearch/search/aggregations/support/BaseMultiValuesSourceFieldConfig.java create mode 100644 server/src/main/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfig.java create mode 100644 server/src/test/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTermsTests.java create mode 100644 server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilderTests.java create mode 100644 server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java create mode 100644 server/src/test/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfigTests.java diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index 3eebb361fd9c4..e69ca149d697d 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -139,7 +139,9 @@ import org.opensearch.search.aggregations.bucket.sampler.InternalSampler; import org.opensearch.search.aggregations.bucket.sampler.ParsedSampler; import org.opensearch.search.aggregations.bucket.terms.LongRareTerms; +import org.opensearch.search.aggregations.bucket.terms.MultiTermsAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.ParsedLongRareTerms; +import org.opensearch.search.aggregations.bucket.terms.ParsedMultiTerms; import org.opensearch.search.aggregations.bucket.terms.ParsedSignificantLongTerms; import org.opensearch.search.aggregations.bucket.terms.ParsedSignificantStringTerms; import org.opensearch.search.aggregations.bucket.terms.ParsedStringRareTerms; @@ -2140,6 +2142,7 @@ static List getDefaultNamedXContents() { map.put(IpRangeAggregationBuilder.NAME, (p, c) -> ParsedBinaryRange.fromXContent(p, (String) c)); map.put(TopHitsAggregationBuilder.NAME, (p, c) -> ParsedTopHits.fromXContent(p, (String) c)); map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c)); + map.put(MultiTermsAggregationBuilder.NAME, (p, c) -> ParsedMultiTerms.fromXContent(p, (String) c)); List entries = map.entrySet() .stream() .map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue())) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/370_multi_terms.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/370_multi_terms.yml new file mode 100644 index 0000000000000..a0e4762ea9b53 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/370_multi_terms.yml @@ -0,0 +1,620 @@ +setup: + - do: + indices.create: + index: test_1 + body: + settings: + number_of_shards: 1 + number_of_replicas: 0 + mappings: + properties: + str: + type: keyword + ip: + type: ip + boolean: + type: boolean + integer: + type: long + double: + type: double + number: + type: long + date: + type: date + + - do: + indices.create: + index: test_2 + body: + settings: + number_of_shards: 2 + number_of_replicas: 0 + mappings: + properties: + str: + type: keyword + integer: + type: long + boolean: + type: boolean + + - do: + cluster.health: + wait_for_status: green + +--- +"Basic test": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: 
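# (Illustrative note, not part of the test fixture.) The Java-client equivalent of the
# multi_terms requests exercised in this file is the new MultiTermsAggregationBuilder, roughly:
#
#     multiTerms("m_terms").terms(asList(
#         new MultiTermsValuesSourceConfig.Builder().setFieldName("str").build(),
#         new MultiTermsValuesSourceConfig.Builder().setFieldName("integer").build()))
#
# Buckets come back ordered by descending doc_count (ties broken by key), and key_as_string
# joins the per-field terms with "|", as the assertions below confirm.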
+ - '{"index": {}}' + - '{"str": "a", "integer": 1}' + - '{"index": {}}' + - '{"str": "a", "integer": 2}' + - '{"index": {}}' + - '{"str": "b", "integer": 1}' + - '{"index": {}}' + - '{"str": "b", "integer": 2}' + - '{"index": {}}' + - '{"str": "a", "integer": 1}' + - '{"index": {}}' + - '{"str": "b", "integer": 1}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: integer + + - length: { aggregations.m_terms.buckets: 4 } + - match: { aggregations.m_terms.buckets.0.key: ["a", 1] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|1" } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["b", 1] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "b|1" } + - match: { aggregations.m_terms.buckets.1.doc_count: 2 } + - match: { aggregations.m_terms.buckets.2.key: ["a", 2] } + - match: { aggregations.m_terms.buckets.2.key_as_string: "a|2" } + - match: { aggregations.m_terms.buckets.2.doc_count: 1 } + - match: { aggregations.m_terms.buckets.3.key: ["b", 2] } + - match: { aggregations.m_terms.buckets.3.key_as_string: "b|2" } + - match: { aggregations.m_terms.buckets.3.doc_count: 1 } + +--- +"IP test": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "ip": "::1"}' + - '{"index": {}}' + - '{"str": "a", "ip": "127.0.0.1"}' + - '{"index": {}}' + - '{"str": "b", "ip": "::1"}' + - '{"index": {}}' + - '{"str": "b", "ip": "127.0.0.1"}' + - '{"index": {}}' + - '{"str": "a", "ip": "127.0.0.1"}' + - '{"index": {}}' + - '{"str": "b", "ip": "::1"}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: ip + + - length: { aggregations.m_terms.buckets: 4 } + - match: { aggregations.m_terms.buckets.0.key: ["a", "127.0.0.1"] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|127.0.0.1" } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["b", "::1"] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "b|::1" } + - match: { aggregations.m_terms.buckets.1.doc_count: 2 } + - match: { aggregations.m_terms.buckets.2.key: ["a", "::1"] } + - match: { aggregations.m_terms.buckets.2.key_as_string: "a|::1" } + - match: { aggregations.m_terms.buckets.2.doc_count: 1 } + - match: { aggregations.m_terms.buckets.3.key: ["b", "127.0.0.1"] } + - match: { aggregations.m_terms.buckets.3.key_as_string: "b|127.0.0.1" } + - match: { aggregations.m_terms.buckets.3.doc_count: 1 } + +--- +"Boolean test": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "boolean": true}' + - '{"index": {}}' + - '{"str": "a", "boolean": false}' + - '{"index": {}}' + - '{"str": "b", "boolean": false}' + - '{"index": {}}' + - '{"str": "b", "boolean": true}' + - '{"index": {}}' + - '{"str": "a", "boolean": true}' + - '{"index": {}}' + - '{"str": "b", "boolean": false}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: boolean + + - length: { aggregations.m_terms.buckets: 4 } + - match: { aggregations.m_terms.buckets.0.key: ["a", true] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|true" } + - match: { 
aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["b", false] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "b|false" } + - match: { aggregations.m_terms.buckets.1.doc_count: 2 } + - match: { aggregations.m_terms.buckets.2.key: ["a", false] } + - match: { aggregations.m_terms.buckets.2.key_as_string: "a|false" } + - match: { aggregations.m_terms.buckets.2.doc_count: 1 } + - match: { aggregations.m_terms.buckets.3.key: ["b", true] } + - match: { aggregations.m_terms.buckets.3.key_as_string: "b|true" } + - match: { aggregations.m_terms.buckets.3.doc_count: 1 } + +--- +"Double test": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "double": 1234.5}' + - '{"index": {}}' + - '{"str": "a", "double": 5678.5}' + - '{"index": {}}' + - '{"str": "b", "double": 1234.5}' + - '{"index": {}}' + - '{"str": "a", "double": 1234.5}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: double + + - length: { aggregations.m_terms.buckets: 3 } + - match: { aggregations.m_terms.buckets.0.key: ["a", 1234.5] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|1234.5" } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["a", 5678.5] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "a|5678.5" } + - match: { aggregations.m_terms.buckets.1.doc_count: 1 } + - match: { aggregations.m_terms.buckets.2.key: ["b", 1234.5] } + - match: { aggregations.m_terms.buckets.2.key_as_string: "b|1234.5" } + - match: { aggregations.m_terms.buckets.2.doc_count: 1 } + +--- +"Date test": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "date": "2022-03-23"}' + - '{"index": {}}' + - '{"str": "a", "date": "2022-03-25"}' + - '{"index": {}}' + - '{"str": "b", "date": "2022-03-23"}' + - '{"index": {}}' + - '{"str": "a", "date": "2022-03-23"}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: date + + - length: { aggregations.m_terms.buckets: 3 } + - match: { aggregations.m_terms.buckets.0.key: ["a", "2022-03-23T00:00:00.000Z"] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|2022-03-23T00:00:00.000Z" } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["a", "2022-03-25T00:00:00.000Z"] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "a|2022-03-25T00:00:00.000Z" } + - match: { aggregations.m_terms.buckets.1.doc_count: 1 } + - match: { aggregations.m_terms.buckets.2.key: ["b", "2022-03-23T00:00:00.000Z"] } + - match: { aggregations.m_terms.buckets.2.key_as_string: "b|2022-03-23T00:00:00.000Z" } + - match: { aggregations.m_terms.buckets.2.doc_count: 1 } + +--- +"Unmapped keywords": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "integer": 1}' + - '{"index": {}}' + - '{"str": "a", "integer": 2}' + - '{"index": {}}' + - '{"str": "b", "integer": 1}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: 
unmapped_string + value_type: string + missing: abc + + - length: { aggregations.m_terms.buckets: 2 } + - match: { aggregations.m_terms.buckets.0.key: ["a", "abc"] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|abc" } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["b", "abc"] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "b|abc" } + - match: { aggregations.m_terms.buckets.1.doc_count: 1 } + +--- +"Null value": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "integer": null}' + - '{"index": {}}' + - '{"str": "a", "integer": 2}' + - '{"index": {}}' + - '{"str": null, "integer": 1}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: integer + + - length: { aggregations.m_terms.buckets: 1 } + - match: { aggregations.m_terms.buckets.0.key: ["a", 2] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|2" } + - match: { aggregations.m_terms.buckets.0.doc_count: 1 } + +--- +"multiple multi_terms bucket": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "integer": 1, "double": 1234.5, "boolean": true}' + - '{"index": {}}' + - '{"str": "a", "integer": 1, "double": 5678.9, "boolean": false}' + - '{"index": {}}' + - '{"str": "a", "integer": 1, "double": 1234.5, "boolean": true}' + - '{"index": {}}' + - '{"str": "b", "integer": 1, "double": 1234.5, "boolean": true}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: integer + aggs: + n_terms: + multi_terms: + terms: + - field: double + - field: boolean + + - length: { aggregations.m_terms.buckets: 2 } + - match: { aggregations.m_terms.buckets.0.key: ["a", 1] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|1" } + - match: { aggregations.m_terms.buckets.0.doc_count: 3 } + - match: { aggregations.m_terms.buckets.0.n_terms.buckets.0.key: [1234.5, true] } + - match: { aggregations.m_terms.buckets.0.n_terms.buckets.0.key_as_string: "1234.5|true" } + - match: { aggregations.m_terms.buckets.0.n_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.0.n_terms.buckets.1.key: [5678.9, false] } + - match: { aggregations.m_terms.buckets.0.n_terms.buckets.1.key_as_string: "5678.9|false" } + - match: { aggregations.m_terms.buckets.0.n_terms.buckets.1.doc_count: 1 } + - match: { aggregations.m_terms.buckets.1.key: ["b", 1] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "b|1" } + - match: { aggregations.m_terms.buckets.1.doc_count: 1 } + +--- +"ordered by metrics": + - skip: + version: "- 3.0.0" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "double": 1234.5, "integer": 1}' + - '{"index": {}}' + - '{"str": "b", "double": 5678.9, "integer": 2}' + - '{"index": {}}' + - '{"str": "b", "double": 5678.9, "integer": 2}' + - '{"index": {}}' + - '{"str": "a", "double": 1234.5, "integer": 1}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: double + order: + the_int_sum: desc + aggs: + the_int_sum: + sum: + field: integer + + - length: { 
aggregations.m_terms.buckets: 2 } + - match: { aggregations.m_terms.buckets.0.key: ["b", 5678.9] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "b|5678.9" } + - match: { aggregations.m_terms.buckets.0.the_int_sum.value: 4.0 } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["a", 1234.5] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "a|1234.5" } + - match: { aggregations.m_terms.buckets.1.the_int_sum.value: 2.0 } + - match: { aggregations.m_terms.buckets.1.doc_count: 2 } + +--- +"top 1 ordered by metrics ": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "double": 1234.5, "integer": 1}' + - '{"index": {}}' + - '{"str": "b", "double": 5678.9, "integer": 2}' + - '{"index": {}}' + - '{"str": "b", "double": 5678.9, "integer": 2}' + - '{"index": {}}' + - '{"str": "a", "double": 1234.5, "integer": 1}' + + - do: + search: + index: test_1 + size: 0 + body: + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: double + order: + the_int_sum: desc + size: 1 + aggs: + the_int_sum: + sum: + field: integer + + - length: { aggregations.m_terms.buckets: 1 } + - match: { aggregations.m_terms.buckets.0.key: ["b", 5678.9] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "b|5678.9" } + - match: { aggregations.m_terms.buckets.0.the_int_sum.value: 4.0 } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + +--- +"min_doc_count": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_1 + refresh: true + body: + - '{"index": {}}' + - '{"str": "a", "integer": 1}' + - '{"index": {}}' + - '{"str": "a", "integer": 1}' + - '{"index": {}}' + - '{"str": "b", "integer": 1}' + - '{"index": {}}' + - '{"str": "c", "integer": 1}' + + - do: + search: + index: test_1 + body: + size: 0 + query: + simple_query_string: + fields: [str] + query: a b + minimum_should_match: 1 + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: integer + min_doc_count: 2 + + - length: { aggregations.m_terms.buckets: 1 } + - match: { aggregations.m_terms.buckets.0.key: ["a", 1] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|1" } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + + - do: + search: + index: test_1 + body: + size: 0 + query: + simple_query_string: + fields: [str] + query: a b + minimum_should_match: 1 + aggs: + m_terms: + multi_terms: + terms: + - field: str + - field: integer + min_doc_count: 0 + + - length: { aggregations.m_terms.buckets: 3 } + - match: { aggregations.m_terms.buckets.0.key: ["a", 1] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|1" } + - match: { aggregations.m_terms.buckets.0.doc_count: 2 } + - match: { aggregations.m_terms.buckets.1.key: ["b", 1] } + - match: { aggregations.m_terms.buckets.1.key_as_string: "b|1" } + - match: { aggregations.m_terms.buckets.1.doc_count: 1 } + - match: { aggregations.m_terms.buckets.2.key: ["c", 1] } + - match: { aggregations.m_terms.buckets.2.key_as_string: "c|1" } + - match: { aggregations.m_terms.buckets.2.doc_count: 0 } + +--- +"sum_other_doc_count": + - skip: + version: "- 2.9.99" + reason: multi_terms aggregation is introduced in 3.0.0 + + - do: + bulk: + index: test_2 + refresh: true + body: + - '{"index": {"routing": "s1"}}' + - '{"str": "a", "integer": 1}' + - '{"index": {"routing": "s1"}}' + - 
'{"str": "a", "integer": 1}' + - '{"index": {"routing": "s1"}}' + - '{"str": "a", "integer": 1}' + - '{"index": {"routing": "s1"}}' + - '{"str": "a", "integer": 1}' + - '{"index": {"routing": "s2"}}' + - '{"str": "b", "integer": 1}' + - '{"index": {"routing": "s2"}}' + - '{"str": "b", "integer": 1}' + - '{"index": {"routing": "s2"}}' + - '{"str": "b", "integer": 1}' + - '{"index": {"routing": "s2"}}' + - '{"str": "a", "integer": 1}' + + - do: + search: + index: test_2 + size: 0 + body: + aggs: + m_terms: + multi_terms: + size: 1 + shard_size: 1 + terms: + - field: str + - field: integer + + - length: { aggregations.m_terms.buckets: 1 } + - match: { aggregations.m_terms.sum_other_doc_count: 4 } + - match: { aggregations.m_terms.buckets.0.key: ["a", 1] } + - match: { aggregations.m_terms.buckets.0.key_as_string: "a|1" } + - match: { aggregations.m_terms.buckets.0.doc_count: 4 } diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java new file mode 100644 index 0000000000000..7d7f80c8ac758 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/MultiTermsIT.java @@ -0,0 +1,167 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket; + +import org.opensearch.action.search.SearchResponse; +import org.opensearch.script.Script; +import org.opensearch.script.ScriptType; +import org.opensearch.search.aggregations.bucket.terms.BaseStringTermsTestCase; +import org.opensearch.search.aggregations.bucket.terms.StringTermsIT; +import org.opensearch.search.aggregations.bucket.terms.Terms; +import org.opensearch.search.aggregations.support.MultiTermsValuesSourceConfig; +import org.opensearch.search.aggregations.support.ValueType; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.util.Collections; + +import static java.util.Arrays.asList; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.core.IsNull.notNullValue; +import static org.opensearch.search.aggregations.AggregationBuilders.multiTerms; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; + +/** + * Extend {@link BaseStringTermsTestCase}. + */ +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class MultiTermsIT extends BaseStringTermsTestCase { + + // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard + public void testSizeIsZero() { + final int minDocCount = randomInt(1); + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> client().prepareSearch("high_card_idx") + .addAggregation( + multiTerms("mterms").terms( + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName(SINGLE_VALUED_FIELD_NAME).build(), + new MultiTermsValuesSourceConfig.Builder().setFieldName(MULTI_VALUED_FIELD_NAME).build() + ) + ).minDocCount(minDocCount).size(0) + ) + .get() + ); + assertThat(exception.getMessage(), containsString("[size] must be greater than 0. 
Found [0] in [mterms]")); + } + + public void testSingleValuedFieldWithValueScript() throws Exception { + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + multiTerms("mterms").terms( + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName("i").build(), + new MultiTermsValuesSourceConfig.Builder().setFieldName(SINGLE_VALUED_FIELD_NAME) + .setScript( + new Script( + ScriptType.INLINE, + StringTermsIT.CustomScriptPlugin.NAME, + "'foo_' + _value", + Collections.emptyMap() + ) + ) + .build() + ) + ) + ) + .get(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("mterms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("mterms")); + assertThat(terms.getBuckets().size(), equalTo(5)); + + for (int i = 0; i < 5; i++) { + Terms.Bucket bucket = terms.getBucketByKey(i + "|foo_val" + i); + assertThat(bucket, notNullValue()); + assertThat(key(bucket), equalTo(i + "|foo_val" + i)); + assertThat(bucket.getDocCount(), equalTo(1L)); + } + } + + public void testSingleValuedFieldWithScript() throws Exception { + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + multiTerms("mterms").terms( + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName("i").build(), + new MultiTermsValuesSourceConfig.Builder().setScript( + new Script( + ScriptType.INLINE, + StringTermsIT.CustomScriptPlugin.NAME, + "doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", + Collections.emptyMap() + ) + ).setUserValueTypeHint(ValueType.STRING).build() + ) + ) + ) + .get(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("mterms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("mterms")); + assertThat(terms.getBuckets().size(), equalTo(5)); + + for (int i = 0; i < 5; i++) { + Terms.Bucket bucket = terms.getBucketByKey(i + "|val" + i); + assertThat(bucket, notNullValue()); + assertThat(key(bucket), equalTo(i + "|val" + i)); + assertThat(bucket.getDocCount(), equalTo(1L)); + } + } + + public void testMultiValuedFieldWithValueScript() throws Exception { + SearchResponse response = client().prepareSearch("idx") + .addAggregation( + multiTerms("mterms").terms( + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName("tag").build(), + new MultiTermsValuesSourceConfig.Builder().setFieldName(MULTI_VALUED_FIELD_NAME) + .setScript( + new Script( + ScriptType.INLINE, + StringTermsIT.CustomScriptPlugin.NAME, + "_value.substring(0,3)", + Collections.emptyMap() + ) + ) + .build() + ) + ) + ) + .get(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("mterms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("mterms")); + assertThat(terms.getBuckets().size(), equalTo(2)); + + Terms.Bucket bucket = terms.getBucketByKey("more|val"); + assertThat(bucket, notNullValue()); + assertThat(key(bucket), equalTo("more|val")); + assertThat(bucket.getDocCount(), equalTo(3L)); + + bucket = terms.getBucketByKey("less|val"); + assertThat(bucket, notNullValue()); + assertThat(key(bucket), equalTo("less|val")); + assertThat(bucket.getDocCount(), equalTo(2L)); + } + + private MultiTermsValuesSourceConfig field(String name) { + return new MultiTermsValuesSourceConfig.Builder().setFieldName(name).build(); + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java 
b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java new file mode 100644 index 0000000000000..7775618ba5b13 --- /dev/null +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/BaseStringTermsTestCase.java @@ -0,0 +1,256 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket.terms; + +import org.junit.After; +import org.junit.Before; +import org.opensearch.action.index.IndexRequestBuilder; +import org.opensearch.common.Strings; +import org.opensearch.index.fielddata.ScriptDocValues; +import org.opensearch.plugins.Plugin; +import org.opensearch.search.aggregations.AggregationTestScriptsPlugin; +import org.opensearch.search.aggregations.bucket.AbstractTermsTestCase; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +@OpenSearchIntegTestCase.SuiteScopeTestCase +public class BaseStringTermsTestCase extends AbstractTermsTestCase { + + protected static final String SINGLE_VALUED_FIELD_NAME = "s_value"; + protected static final String MULTI_VALUED_FIELD_NAME = "s_values"; + protected static Map> expectedMultiSortBuckets; + + @Override + protected Collection> nodePlugins() { + return Collections.singleton(CustomScriptPlugin.class); + } + + @Before + public void randomizeOptimizations() { + TermsAggregatorFactory.COLLECT_SEGMENT_ORDS = randomBoolean(); + TermsAggregatorFactory.REMAP_GLOBAL_ORDS = randomBoolean(); + } + + @After + public void resetOptimizations() { + TermsAggregatorFactory.COLLECT_SEGMENT_ORDS = null; + TermsAggregatorFactory.REMAP_GLOBAL_ORDS = null; + } + + public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { + + @Override + protected Map, Object>> pluginScripts() { + Map, Object>> scripts = super.pluginScripts(); + + scripts.put("'foo_' + _value", vars -> "foo_" + (String) vars.get("_value")); + scripts.put("_value.substring(0,3)", vars -> ((String) vars.get("_value")).substring(0, 3)); + + scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "']", vars -> { + Map doc = (Map) vars.get("doc"); + return doc.get(MULTI_VALUED_FIELD_NAME); + }); + + scripts.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", vars -> { + Map doc = (Map) vars.get("doc"); + ScriptDocValues.Strings value = (ScriptDocValues.Strings) doc.get(SINGLE_VALUED_FIELD_NAME); + return value.getValue(); + }); + + scripts.put("42", vars -> 42); + + return scripts; + } + + @Override + protected Map, Object>> nonDeterministicPluginScripts() { + Map, Object>> scripts = new HashMap<>(); + + scripts.put("Math.random()", vars -> randomDouble()); + + return scripts; + } + } + + @Override + public void setupSuiteScopeCluster() throws Exception { + assertAcked( + client().admin() + .indices() + .prepareCreate("idx") + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") + .get() + ); + List builders = new ArrayList<>(); + for (int i = 0; i < 5; i++) { + 
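+ // each of the five docs gets: a single-valued keyword "val" + i, numeric fields "i" and "constant",
+ // a "tag" of "more"/"less", and a two-entry multi-valued keyword array ["val" + i, "val" + (i + 1)]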
builders.add( + client().prepareIndex("idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, "val" + i) + .field("i", i) + .field("constant", 1) + .field("tag", i < 5 / 2 + 1 ? "more" : "less") + .startArray(MULTI_VALUED_FIELD_NAME) + .value("val" + i) + .value("val" + (i + 1)) + .endArray() + .endObject() + ) + ); + } + + getMultiSortDocs(builders); + + assertAcked( + client().admin() + .indices() + .prepareCreate("high_card_idx") + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") + .get() + ); + for (int i = 0; i < 100; i++) { + builders.add( + client().prepareIndex("high_card_idx") + .setSource( + jsonBuilder().startObject() + .field(SINGLE_VALUED_FIELD_NAME, "val" + Strings.padStart(i + "", 3, '0')) + .startArray(MULTI_VALUED_FIELD_NAME) + .value("val" + Strings.padStart(i + "", 3, '0')) + .value("val" + Strings.padStart((i + 1) + "", 3, '0')) + .endArray() + .endObject() + ) + ); + } + prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer").get(); + + for (int i = 0; i < 2; i++) { + builders.add( + client().prepareIndex("empty_bucket_idx") + .setId("" + i) + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) + ); + } + indexRandom(true, builders); + createIndex("idx_unmapped"); + ensureSearchable(); + } + + private void getMultiSortDocs(List builders) throws IOException { + expectedMultiSortBuckets = new HashMap<>(); + Map bucketProps = new HashMap<>(); + bucketProps.put("_term", "val1"); + bucketProps.put("_count", 3L); + bucketProps.put("avg_l", 1d); + bucketProps.put("sum_d", 6d); + expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); + bucketProps = new HashMap<>(); + bucketProps.put("_term", "val2"); + bucketProps.put("_count", 3L); + bucketProps.put("avg_l", 2d); + bucketProps.put("sum_d", 6d); + expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); + bucketProps = new HashMap<>(); + bucketProps.put("_term", "val3"); + bucketProps.put("_count", 2L); + bucketProps.put("avg_l", 3d); + bucketProps.put("sum_d", 3d); + expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); + bucketProps = new HashMap<>(); + bucketProps.put("_term", "val4"); + bucketProps.put("_count", 2L); + bucketProps.put("avg_l", 3d); + bucketProps.put("sum_d", 4d); + expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); + bucketProps = new HashMap<>(); + bucketProps.put("_term", "val5"); + bucketProps.put("_count", 2L); + bucketProps.put("avg_l", 5d); + bucketProps.put("sum_d", 3d); + expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); + bucketProps = new HashMap<>(); + bucketProps.put("_term", "val6"); + bucketProps.put("_count", 1L); + bucketProps.put("avg_l", 5d); + bucketProps.put("sum_d", 1d); + expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); + bucketProps = new HashMap<>(); + bucketProps.put("_term", "val7"); + bucketProps.put("_count", 1L); + bucketProps.put("avg_l", 5d); + bucketProps.put("sum_d", 1d); + expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); + + assertAcked( + client().admin() + .indices() + .prepareCreate("sort_idx") + .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") + .get() + ); + for (int i = 1; i <= 3; i++) { + builders.add( + client().prepareIndex("sort_idx") + 
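+ // three copies each of "val1" (l = 1) and "val2" (l = 2), with "d" running over 1..3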
.setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val1").field("l", 1).field("d", i).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val2").field("l", 2).field("d", i).endObject()) + ); + } + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 3).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 2).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val6").field("l", 5).field("d", 1).endObject()) + ); + builders.add( + client().prepareIndex("sort_idx") + .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val7").field("l", 5).field("d", 1).endObject()) + ); + } + + protected String key(Terms.Bucket bucket) { + return bucket.getKeyAsString(); + } +} diff --git a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java index 3190bcb72fcbb..64f81cdcdec98 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/aggregations/bucket/terms/StringTermsIT.java @@ -32,25 +32,19 @@ package org.opensearch.search.aggregations.bucket.terms; import org.opensearch.OpenSearchException; -import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.SearchPhaseExecutionException; import org.opensearch.action.search.SearchResponse; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.XContentParseException; import org.opensearch.common.xcontent.XContentParser; import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.index.fielddata.ScriptDocValues; import org.opensearch.index.mapper.IndexFieldMapper; import org.opensearch.index.query.QueryBuilders; -import org.opensearch.plugins.Plugin; import org.opensearch.script.Script; import org.opensearch.script.ScriptType; import org.opensearch.search.aggregations.AggregationExecutionException; -import org.opensearch.search.aggregations.AggregationTestScriptsPlugin; import org.opensearch.search.aggregations.Aggregator.SubAggCollectionMode; import org.opensearch.search.aggregations.BucketOrder; -import org.opensearch.search.aggregations.bucket.AbstractTermsTestCase; import 
org.opensearch.search.aggregations.bucket.filter.Filter; import org.opensearch.search.aggregations.bucket.terms.Terms.Bucket; import org.opensearch.search.aggregations.metrics.Avg; @@ -60,23 +54,13 @@ import org.opensearch.search.aggregations.support.ValueType; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.test.OpenSearchIntegTestCase; -import org.junit.After; -import org.junit.Before; -import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; -import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.function.Function; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.index.query.QueryBuilders.termQuery; import static org.opensearch.search.aggregations.AggregationBuilders.avg; import static org.opensearch.search.aggregations.AggregationBuilders.extendedStats; @@ -93,228 +77,7 @@ import static org.hamcrest.core.IsNull.notNullValue; @OpenSearchIntegTestCase.SuiteScopeTestCase -public class StringTermsIT extends AbstractTermsTestCase { - - private static final String SINGLE_VALUED_FIELD_NAME = "s_value"; - private static final String MULTI_VALUED_FIELD_NAME = "s_values"; - private static Map> expectedMultiSortBuckets; - - @Override - protected Collection> nodePlugins() { - return Collections.singleton(CustomScriptPlugin.class); - } - - @Before - public void randomizeOptimizations() { - TermsAggregatorFactory.COLLECT_SEGMENT_ORDS = randomBoolean(); - TermsAggregatorFactory.REMAP_GLOBAL_ORDS = randomBoolean(); - } - - @After - public void resetOptimizations() { - TermsAggregatorFactory.COLLECT_SEGMENT_ORDS = null; - TermsAggregatorFactory.REMAP_GLOBAL_ORDS = null; - } - - public static class CustomScriptPlugin extends AggregationTestScriptsPlugin { - - @Override - protected Map, Object>> pluginScripts() { - Map, Object>> scripts = super.pluginScripts(); - - scripts.put("'foo_' + _value", vars -> "foo_" + (String) vars.get("_value")); - scripts.put("_value.substring(0,3)", vars -> ((String) vars.get("_value")).substring(0, 3)); - - scripts.put("doc['" + MULTI_VALUED_FIELD_NAME + "']", vars -> { - Map doc = (Map) vars.get("doc"); - return doc.get(MULTI_VALUED_FIELD_NAME); - }); - - scripts.put("doc['" + SINGLE_VALUED_FIELD_NAME + "'].value", vars -> { - Map doc = (Map) vars.get("doc"); - ScriptDocValues.Strings value = (ScriptDocValues.Strings) doc.get(SINGLE_VALUED_FIELD_NAME); - return value.getValue(); - }); - - scripts.put("42", vars -> 42); - - return scripts; - } - - @Override - protected Map, Object>> nonDeterministicPluginScripts() { - Map, Object>> scripts = new HashMap<>(); - - scripts.put("Math.random()", vars -> StringTermsIT.randomDouble()); - - return scripts; - } - } - - @Override - public void setupSuiteScopeCluster() throws Exception { - assertAcked( - client().admin() - .indices() - .prepareCreate("idx") - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") - .get() - ); - List builders = new ArrayList<>(); - for (int i = 0; i < 5; i++) { - builders.add( - client().prepareIndex("idx") - .setSource( - jsonBuilder().startObject() - .field(SINGLE_VALUED_FIELD_NAME, "val" + i) - .field("i", i) - .field("constant", 1) - .field("tag", i < 5 / 2 + 1 ? 
"more" : "less") - .startArray(MULTI_VALUED_FIELD_NAME) - .value("val" + i) - .value("val" + (i + 1)) - .endArray() - .endObject() - ) - ); - } - - getMultiSortDocs(builders); - - assertAcked( - client().admin() - .indices() - .prepareCreate("high_card_idx") - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") - .get() - ); - for (int i = 0; i < 100; i++) { - builders.add( - client().prepareIndex("high_card_idx") - .setSource( - jsonBuilder().startObject() - .field(SINGLE_VALUED_FIELD_NAME, "val" + Strings.padStart(i + "", 3, '0')) - .startArray(MULTI_VALUED_FIELD_NAME) - .value("val" + Strings.padStart(i + "", 3, '0')) - .value("val" + Strings.padStart((i + 1) + "", 3, '0')) - .endArray() - .endObject() - ) - ); - } - prepareCreate("empty_bucket_idx").setMapping(SINGLE_VALUED_FIELD_NAME, "type=integer").get(); - - for (int i = 0; i < 2; i++) { - builders.add( - client().prepareIndex("empty_bucket_idx") - .setId("" + i) - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject()) - ); - } - indexRandom(true, builders); - createIndex("idx_unmapped"); - ensureSearchable(); - } - - private void getMultiSortDocs(List builders) throws IOException { - expectedMultiSortBuckets = new HashMap<>(); - Map bucketProps = new HashMap<>(); - bucketProps.put("_term", "val1"); - bucketProps.put("_count", 3L); - bucketProps.put("avg_l", 1d); - bucketProps.put("sum_d", 6d); - expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - bucketProps = new HashMap<>(); - bucketProps.put("_term", "val2"); - bucketProps.put("_count", 3L); - bucketProps.put("avg_l", 2d); - bucketProps.put("sum_d", 6d); - expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - bucketProps = new HashMap<>(); - bucketProps.put("_term", "val3"); - bucketProps.put("_count", 2L); - bucketProps.put("avg_l", 3d); - bucketProps.put("sum_d", 3d); - expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - bucketProps = new HashMap<>(); - bucketProps.put("_term", "val4"); - bucketProps.put("_count", 2L); - bucketProps.put("avg_l", 3d); - bucketProps.put("sum_d", 4d); - expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - bucketProps = new HashMap<>(); - bucketProps.put("_term", "val5"); - bucketProps.put("_count", 2L); - bucketProps.put("avg_l", 5d); - bucketProps.put("sum_d", 3d); - expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - bucketProps = new HashMap<>(); - bucketProps.put("_term", "val6"); - bucketProps.put("_count", 1L); - bucketProps.put("avg_l", 5d); - bucketProps.put("sum_d", 1d); - expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - bucketProps = new HashMap<>(); - bucketProps.put("_term", "val7"); - bucketProps.put("_count", 1L); - bucketProps.put("avg_l", 5d); - bucketProps.put("sum_d", 1d); - expectedMultiSortBuckets.put((String) bucketProps.get("_term"), bucketProps); - - assertAcked( - client().admin() - .indices() - .prepareCreate("sort_idx") - .setMapping(SINGLE_VALUED_FIELD_NAME, "type=keyword", MULTI_VALUED_FIELD_NAME, "type=keyword", "tag", "type=keyword") - .get() - ); - for (int i = 1; i <= 3; i++) { - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val1").field("l", 1).field("d", i).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - 
.setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val2").field("l", 2).field("d", i).endObject()) - ); - } - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 1).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val3").field("l", 3).field("d", 2).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 1).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val4").field("l", 3).field("d", 3).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 1).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val5").field("l", 5).field("d", 2).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val6").field("l", 5).field("d", 1).endObject()) - ); - builders.add( - client().prepareIndex("sort_idx") - .setSource(jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val7").field("l", 5).field("d", 1).endObject()) - ); - } - - private String key(Terms.Bucket bucket) { - return bucket.getKeyAsString(); - } +public class StringTermsIT extends BaseStringTermsTestCase { // the main purpose of this test is to make sure we're not allocating 2GB of memory per shard public void testSizeIsZero() { diff --git a/server/src/main/java/org/opensearch/search/SearchModule.java b/server/src/main/java/org/opensearch/search/SearchModule.java index dc5309b50abb8..bf0cc646d271e 100644 --- a/server/src/main/java/org/opensearch/search/SearchModule.java +++ b/server/src/main/java/org/opensearch/search/SearchModule.java @@ -159,8 +159,11 @@ import org.opensearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.opensearch.search.aggregations.bucket.sampler.UnmappedSampler; import org.opensearch.search.aggregations.bucket.terms.DoubleTerms; +import org.opensearch.search.aggregations.bucket.terms.InternalMultiTerms; import org.opensearch.search.aggregations.bucket.terms.LongRareTerms; import org.opensearch.search.aggregations.bucket.terms.LongTerms; +import org.opensearch.search.aggregations.bucket.terms.MultiTermsAggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.MultiTermsAggregationFactory; import org.opensearch.search.aggregations.bucket.terms.RareTermsAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.SignificantLongTerms; import org.opensearch.search.aggregations.bucket.terms.SignificantStringTerms; @@ -687,6 +690,12 @@ private ValuesSourceRegistry registerAggregations(List plugins) { .setAggregatorRegistrar(CompositeAggregationBuilder::registerAggregators), builder ); + registerAggregation( + new AggregationSpec(MultiTermsAggregationBuilder.NAME, MultiTermsAggregationBuilder::new, MultiTermsAggregationBuilder.PARSER) + .addResultReader(InternalMultiTerms::new) + .setAggregatorRegistrar(MultiTermsAggregationFactory::registerAggregators), + builder + ); registerFromPlugin(plugins, SearchPlugin::getAggregations, (agg) -> 
this.registerAggregation(agg, builder)); // after aggs have been registered, see if there are any new VSTypes that need to be linked to core fields diff --git a/server/src/main/java/org/opensearch/search/aggregations/AggregationBuilders.java b/server/src/main/java/org/opensearch/search/aggregations/AggregationBuilders.java index 99a1107675edf..69a9fd92ac459 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/AggregationBuilders.java +++ b/server/src/main/java/org/opensearch/search/aggregations/AggregationBuilders.java @@ -66,6 +66,7 @@ import org.opensearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; import org.opensearch.search.aggregations.bucket.sampler.Sampler; import org.opensearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; +import org.opensearch.search.aggregations.bucket.terms.MultiTermsAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.SignificantTerms; import org.opensearch.search.aggregations.bucket.terms.SignificantTermsAggregationBuilder; import org.opensearch.search.aggregations.bucket.terms.SignificantTextAggregationBuilder; @@ -388,4 +389,11 @@ public static ScriptedMetricAggregationBuilder scriptedMetric(String name) { public static CompositeAggregationBuilder composite(String name, List> sources) { return new CompositeAggregationBuilder(name, sources); } + + /** + * Create a new {@link MultiTermsAggregationBuilder} aggregation with the given name. + */ + public static MultiTermsAggregationBuilder multiTerms(String name) { + return new MultiTermsAggregationBuilder(name); + } } diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTerms.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTerms.java new file mode 100644 index 0000000000000..fd1758d3ea8ba --- /dev/null +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTerms.java @@ -0,0 +1,440 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket.terms; + +import org.apache.lucene.util.BytesRef; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.search.DocValueFormat; +import org.opensearch.search.aggregations.AggregationExecutionException; +import org.opensearch.search.aggregations.Aggregations; +import org.opensearch.search.aggregations.BucketOrder; +import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.search.aggregations.KeyComparable; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * Result of the {@link MultiTermsAggregator}. + */ +public class InternalMultiTerms extends InternalTerms { + /** + * Internal Multi Terms Bucket. + */ + public static class Bucket extends InternalTerms.AbstractInternalBucket implements KeyComparable { + + protected long bucketOrd; + /** + * list of terms values. 
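+ * Each element is one term's raw value (e.g. a BytesRef, Long or Double); values are rendered through {@link #getKey()}.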
+ */ + protected List termValues; + protected long docCount; + protected InternalAggregations aggregations; + protected boolean showDocCountError; + protected long docCountError; + /** + * A list of term's {@link DocValueFormat}. + */ + protected final List termFormats; + + private static final String PIPE = "|"; + + /** + * Create default {@link Bucket}. + */ + public static Bucket EMPTY(boolean showTermDocCountError, List formats) { + return new Bucket(null, 0, null, showTermDocCountError, 0, formats); + } + + public Bucket( + List values, + long docCount, + InternalAggregations aggregations, + boolean showDocCountError, + long docCountError, + List formats + ) { + this.termValues = values; + this.docCount = docCount; + this.aggregations = aggregations; + this.showDocCountError = showDocCountError; + this.docCountError = docCountError; + this.termFormats = formats; + } + + public Bucket(StreamInput in, List formats, boolean showDocCountError) throws IOException { + this.termValues = in.readList(StreamInput::readGenericValue); + this.docCount = in.readVLong(); + this.aggregations = InternalAggregations.readFrom(in); + this.showDocCountError = showDocCountError; + this.docCountError = -1; + if (showDocCountError) { + this.docCountError = in.readLong(); + } + this.termFormats = formats; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(CommonFields.KEY.getPreferredName(), getKey()); + builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString()); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); + if (showDocCountError) { + builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError()); + } + aggregations.toXContentInternal(builder, params); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(termValues, StreamOutput::writeGenericValue); + out.writeVLong(docCount); + aggregations.writeTo(out); + if (showDocCountError) { + out.writeLong(docCountError); + } + } + + @Override + public List getKey() { + List keys = new ArrayList<>(termValues.size()); + for (int i = 0; i < termValues.size(); i++) { + keys.add(formatObject(termValues.get(i), termFormats.get(i))); + } + return keys; + } + + @Override + public String getKeyAsString() { + return getKey().stream().map(Object::toString).collect(Collectors.joining(PIPE)); + } + + @Override + public long getDocCount() { + return docCount; + } + + @Override + public Aggregations getAggregations() { + return aggregations; + } + + @Override + void setDocCountError(long docCountError) { + this.docCountError = docCountError; + } + + @Override + public void setDocCountError(Function updater) { + this.docCountError = updater.apply(this.docCountError); + } + + @Override + public boolean showDocCountError() { + return showDocCountError; + } + + @Override + public Number getKeyAsNumber() { + throw new IllegalArgumentException("getKeyAsNumber is not supported by [" + MultiTermsAggregationBuilder.NAME + "]"); + } + + @Override + public long getDocCountError() { + if (!showDocCountError) { + throw new IllegalStateException("show_terms_doc_count_error is false"); + } + return docCountError; + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + Bucket other = (Bucket) obj; + if (showDocCountError && docCountError != 
other.docCountError) { + return false; + } + return termValues.equals(other.termValues) + && docCount == other.docCount + && aggregations.equals(other.aggregations) + && showDocCountError == other.showDocCountError; + } + + @Override + public int hashCode() { + return Objects.hash(termValues, docCount, aggregations, showDocCountError, showDocCountError ? docCountError : 0); + } + + @Override + public int compareKey(Bucket other) { + return new BucketComparator().compare(this.termValues, other.termValues); + } + + /** + * Visible for testing. + */ + protected static class BucketComparator implements Comparator> { + @SuppressWarnings({ "unchecked" }) + @Override + public int compare(List thisObjects, List thatObjects) { + if (thisObjects.size() != thatObjects.size()) { + throw new AggregationExecutionException( + "[" + MultiTermsAggregationBuilder.NAME + "] aggregations failed due to terms" + " size is different" + ); + } + for (int i = 0; i < thisObjects.size(); i++) { + final Object thisObject = thisObjects.get(i); + final Object thatObject = thatObjects.get(i); + int ret = ((Comparable) thisObject).compareTo(thatObject); + if (ret != 0) { + return ret; + } + } + return 0; + } + } + } + + private final int shardSize; + private final boolean showTermDocCountError; + private final long otherDocCount; + private final List termFormats; + private final List buckets; + private Map bucketMap; + + private long docCountError; + + public InternalMultiTerms( + String name, + BucketOrder reduceOrder, + BucketOrder order, + int requiredSize, + long minDocCount, + Map metadata, + int shardSize, + boolean showTermDocCountError, + long otherDocCount, + long docCountError, + List formats, + List buckets + ) { + super(name, reduceOrder, order, requiredSize, minDocCount, metadata); + this.shardSize = shardSize; + this.showTermDocCountError = showTermDocCountError; + this.otherDocCount = otherDocCount; + this.termFormats = formats; + this.buckets = buckets; + this.docCountError = docCountError; + } + + public InternalMultiTerms(StreamInput in) throws IOException { + super(in); + this.docCountError = in.readZLong(); + this.termFormats = in.readList(stream -> stream.readNamedWriteable(DocValueFormat.class)); + this.shardSize = readSize(in); + this.showTermDocCountError = in.readBoolean(); + this.otherDocCount = in.readVLong(); + this.buckets = in.readList(steam -> new Bucket(steam, termFormats, showTermDocCountError)); + } + + @Override + public String getWriteableName() { + return MultiTermsAggregationBuilder.NAME; + } + + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + } + + @Override + public InternalMultiTerms create(List buckets) { + return new InternalMultiTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + shardSize, + showTermDocCountError, + otherDocCount, + docCountError, + termFormats, + buckets + ); + } + + @Override + public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { + return new Bucket( + prototype.termValues, + prototype.docCount, + aggregations, + prototype.showDocCountError, + prototype.docCountError, + prototype.termFormats + ); + } + + @Override + protected void writeTermTypeInfoTo(StreamOutput out) throws IOException { + out.writeZLong(docCountError); + out.writeCollection(termFormats, StreamOutput::writeNamedWriteable); + writeSize(shardSize, out); + 
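+ // the write order mirrors the InternalMultiTerms(StreamInput) constructor above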
out.writeBoolean(showTermDocCountError); + out.writeVLong(otherDocCount); + out.writeList(buckets); + } + + @Override + public List getBuckets() { + return buckets; + } + + @Override + public Bucket getBucketByKey(String term) { + if (bucketMap == null) { + bucketMap = buckets.stream().collect(Collectors.toMap(InternalMultiTerms.Bucket::getKeyAsString, Function.identity())); + } + return bucketMap.get(term); + } + + @Override + public long getDocCountError() { + return docCountError; + } + + @Override + public long getSumOfOtherDocCounts() { + return otherDocCount; + } + + @Override + protected void setDocCountError(long docCountError) { + this.docCountError = docCountError; + } + + @Override + protected int getShardSize() { + return shardSize; + } + + @Override + protected InternalMultiTerms create( + String name, + List buckets, + BucketOrder reduceOrder, + long docCountError, + long otherDocCount + ) { + return new InternalMultiTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + shardSize, + showTermDocCountError, + otherDocCount, + docCountError, + termFormats, + buckets + ); + } + + @Override + protected Bucket[] createBucketsArray(int size) { + return new Bucket[size]; + } + + @Override + Bucket createBucket(long docCount, InternalAggregations aggs, long docCountError, Bucket prototype) { + return new Bucket( + prototype.termValues, + docCount, + aggs, + prototype.showDocCountError, + prototype.docCountError, + prototype.termFormats + ); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null || getClass() != obj.getClass()) return false; + if (super.equals(obj) == false) return false; + InternalMultiTerms that = (InternalMultiTerms) obj; + + if (showTermDocCountError && docCountError != that.docCountError) { + return false; + } + return Objects.equals(buckets, that.buckets) + && Objects.equals(otherDocCount, that.otherDocCount) + && Objects.equals(showTermDocCountError, that.showTermDocCountError) + && Objects.equals(shardSize, that.shardSize) + && Objects.equals(docCountError, that.docCountError); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), buckets, otherDocCount, showTermDocCountError, shardSize); + } + + /** + * Copy from InternalComposite + * + * Format obj using the provided {@link DocValueFormat}. + * If the format is equals to {@link DocValueFormat#RAW}, the object is returned as is + * for numbers and a string for {@link BytesRef}s. 
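+ * Any other format is applied via {@code format.format(value)}; objects of unrecognized types are returned unchanged.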
+ */ + static Object formatObject(Object obj, DocValueFormat format) { + if (obj == null) { + return null; + } + if (obj.getClass() == BytesRef.class) { + BytesRef value = (BytesRef) obj; + if (format == DocValueFormat.RAW) { + return value.utf8ToString(); + } else { + return format.format(value); + } + } else if (obj.getClass() == Long.class) { + long value = (long) obj; + if (format == DocValueFormat.RAW) { + return value; + } else { + return format.format(value); + } + } else if (obj.getClass() == Double.class) { + double value = (double) obj; + if (format == DocValueFormat.RAW) { + return value; + } else { + return format.format(value); + } + } + return obj; + } +} diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java index be397bcbb2f2c..8fae5720a9082 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/InternalTerms.java @@ -57,11 +57,12 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Function; import static org.opensearch.search.aggregations.InternalOrder.isKeyAsc; import static org.opensearch.search.aggregations.InternalOrder.isKeyOrder; -public abstract class InternalTerms, B extends InternalTerms.Bucket> extends +public abstract class InternalTerms, B extends InternalTerms.AbstractInternalBucket> extends InternalMultiBucketAggregation implements Terms { @@ -69,10 +70,15 @@ public abstract class InternalTerms, B extends Int protected static final ParseField DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME = new ParseField("doc_count_error_upper_bound"); protected static final ParseField SUM_OF_OTHER_DOC_COUNTS = new ParseField("sum_other_doc_count"); - public abstract static class Bucket> extends InternalMultiBucketAggregation.InternalBucket - implements - Terms.Bucket, - KeyComparable { + public abstract static class AbstractInternalBucket extends InternalMultiBucketAggregation.InternalBucket implements Terms.Bucket { + abstract void setDocCountError(long docCountError); + + abstract void setDocCountError(Function updater); + + abstract boolean showDocCountError(); + } + + public abstract static class Bucket> extends AbstractInternalBucket implements KeyComparable { /** * Reads a bucket. Should be a constructor reference. */ @@ -142,6 +148,21 @@ public long getDocCountError() { return docCountError; } + @Override + public void setDocCountError(long docCountError) { + this.docCountError = docCountError; + } + + @Override + public void setDocCountError(Function updater) { + this.docCountError = updater.apply(this.docCountError); + } + + @Override + public boolean showDocCountError() { + return showDocCountError; + } + @Override public Aggregations getAggregations() { return aggregations; @@ -274,7 +295,7 @@ private long getDocCountError(InternalTerms terms) { } else { // otherwise use the doc count of the last term in the // aggregation - return terms.getBuckets().stream().mapToLong(Bucket::getDocCount).min().getAsLong(); + return terms.getBuckets().stream().mapToLong(MultiBucketsAggregation.Bucket::getDocCount).min().getAsLong(); } } else { return -1; @@ -393,7 +414,7 @@ public InternalAggregation reduce(List aggregations, Reduce // for the existing error calculated in a previous reduce. // Note that if the error is unbounded (-1) this will be fixed // later in this method. 
- bucket.docCountError -= thisAggDocCountError; + bucket.setDocCountError(docCountError -> docCountError - thisAggDocCountError); } } @@ -419,11 +440,12 @@ public InternalAggregation reduce(List aggregations, Reduce final BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, order.comparator()); for (B bucket : reducedBuckets) { if (sumDocCountError == -1) { - bucket.docCountError = -1; + bucket.setDocCountError(-1); } else { - bucket.docCountError += sumDocCountError; + final long finalSumDocCountError = sumDocCountError; + bucket.setDocCountError(docCountError -> docCountError + finalSumDocCountError); } - if (bucket.docCount >= minDocCount) { + if (bucket.getDocCount() >= minDocCount) { B removed = ordered.insertWithOverflow(bucket); if (removed != null) { otherDocCount += removed.getDocCount(); @@ -448,9 +470,10 @@ public InternalAggregation reduce(List aggregations, Reduce reduceContext.consumeBucketsAndMaybeBreak(1); list[i] = reducedBuckets.get(i); if (sumDocCountError == -1) { - list[i].docCountError = -1; + list[i].setDocCountError(-1); } else { - list[i].docCountError += sumDocCountError; + final long fSumDocCountError = sumDocCountError; + list[i].setDocCountError(docCountError -> docCountError + fSumDocCountError); } } } @@ -474,15 +497,15 @@ protected B reduceBucket(List buckets, ReduceContext context) { long docCountError = 0; List aggregationsList = new ArrayList<>(buckets.size()); for (B bucket : buckets) { - docCount += bucket.docCount; + docCount += bucket.getDocCount(); if (docCountError != -1) { - if (bucket.docCountError == -1) { + if (bucket.showDocCountError() == false || bucket.getDocCountError() == -1) { docCountError = -1; } else { - docCountError += bucket.docCountError; + docCountError += bucket.getDocCountError(); } } - aggregationsList.add(bucket.aggregations); + aggregationsList.add((InternalAggregations) bucket.getAggregations()); } InternalAggregations aggs = InternalAggregations.reduce(aggregationsList, context); return createBucket(docCount, aggs, docCountError, buckets.get(0)); @@ -524,12 +547,12 @@ protected static XContentBuilder doXContentCommon( Params params, long docCountError, long otherDocCount, - List buckets + List buckets ) throws IOException { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), docCountError); builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); builder.startArray(CommonFields.BUCKETS.getPreferredName()); - for (Bucket bucket : buckets) { + for (AbstractInternalBucket bucket : buckets) { bucket.toXContent(builder, params); } builder.endArray(); diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilder.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilder.java new file mode 100644 index 0000000000000..78be4f980bce5 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilder.java @@ -0,0 +1,443 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.aggregations.bucket.terms; + +import org.opensearch.common.ParseField; +import org.opensearch.common.io.stream.StreamInput; +import org.opensearch.common.io.stream.StreamOutput; +import org.opensearch.common.xcontent.LoggingDeprecationHandler; +import org.opensearch.common.xcontent.ObjectParser; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.index.query.QueryShardContext; +import org.opensearch.search.aggregations.AbstractAggregationBuilder; +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.Aggregator; +import org.opensearch.search.aggregations.AggregatorFactories; +import org.opensearch.search.aggregations.AggregatorFactory; +import org.opensearch.search.aggregations.BucketOrder; +import org.opensearch.search.aggregations.InternalOrder; +import org.opensearch.search.aggregations.support.MultiTermsValuesSourceConfig; +import org.opensearch.search.aggregations.support.ValuesSourceRegistry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.opensearch.search.aggregations.bucket.terms.TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS; + +/** + * Multi-terms aggregation supports collecting terms from multiple fields in the same document. + * + *

+ * <p>
+ * For example, use the multi-terms aggregation to group by the two fields region and host, calculate the max cpu for each bucket, and sort by it.
+ * </p>
+ *
+ * <pre>
+ *   GET test_000001/_search
+ *   {
+ *     "size": 0,
+ *     "aggs": {
+ *       "hot": {
+ *         "multi_terms": {
+ *           "terms": [{
+ *             "field": "region"
+ *           },{
+ *             "field": "host"
+ *           }],
+ *           "order": {"max-cpu": "desc"}
+ *         },
+ *         "aggs": {
+ *           "max-cpu": { "max": { "field": "cpu" } }
+ *         }
+ *       }
+ *     }
+ *   }
+ * </pre>
+ *
+ * <p>
+ * The aggregation result contains
+ *   - key: a list of values extracted from the multiple fields of the same doc.
+ * </p>
+ *
+ * <pre>
+ *   {
+ *     "hot": {
+ *       "doc_count_error_upper_bound": 0,
+ *       "sum_other_doc_count": 0,
+ *       "buckets": [
+ *         {
+ *           "key": [
+ *             "dub",
+ *             "h1"
+ *           ],
+ *           "key_as_string": "dub|h1",
+ *           "doc_count": 2,
+ *           "max-cpu": {
+ *             "value": 90.0
+ *           }
+ *         },
+ *         {
+ *           "key": [
+ *             "dub",
+ *             "h2"
+ *           ],
+ *           "key_as_string": "dub|h2",
+ *           "doc_count": 2,
+ *           "max-cpu": {
+ *             "value": 70.0
+ *           }
+ *         }
+ *       ]
+ *     }
+ *   }
+ * </pre>
+ *
+ * <p>
+ * Note: the current implementation focuses on adding the new aggregation type. Performance (latency) is not yet optimal,
+ * mainly because the list of values is simply encoded/decoded to form the bucket key.
+ * </p>
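+ *
+ * <p>
+ * A rough Java-API equivalent of the request above (an illustrative sketch, not part of the original documentation;
+ * it assumes {@code Arrays.asList} and the referenced builder classes are imported):
+ * </p>
+ *
+ * <pre>
+ *   MultiTermsAggregationBuilder hot = AggregationBuilders.multiTerms("hot")
+ *       .terms(Arrays.asList(
+ *           new MultiTermsValuesSourceConfig.Builder().setFieldName("region").build(),
+ *           new MultiTermsValuesSourceConfig.Builder().setFieldName("host").build()
+ *       ))
+ *       .order(BucketOrder.aggregation("max-cpu", false))   // descending, matching "order": {"max-cpu": "desc"}
+ *       .subAggregation(AggregationBuilders.max("max-cpu").field("cpu"));
+ * </pre>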

+ */ +public class MultiTermsAggregationBuilder extends AbstractAggregationBuilder { + public static final String NAME = "multi_terms"; + public static final ObjectParser PARSER = ObjectParser.fromBuilder( + NAME, + MultiTermsAggregationBuilder::new + ); + + public static final ParseField TERMS_FIELD = new ParseField("terms"); + public static final ParseField SHARD_SIZE_FIELD_NAME = new ParseField("shard_size"); + public static final ParseField MIN_DOC_COUNT_FIELD_NAME = new ParseField("min_doc_count"); + public static final ParseField SHARD_MIN_DOC_COUNT_FIELD_NAME = new ParseField("shard_min_doc_count"); + public static final ParseField REQUIRED_SIZE_FIELD_NAME = new ParseField("size"); + public static final ParseField SHOW_TERM_DOC_COUNT_ERROR = new ParseField("show_term_doc_count_error"); + public static final ParseField ORDER_FIELD = new ParseField("order"); + + @Override + public String getType() { + return NAME; + } + + static { + final ObjectParser parser = MultiTermsValuesSourceConfig.PARSER.apply( + true, + true, + true, + true + ); + PARSER.declareObjectArray(MultiTermsAggregationBuilder::terms, (p, c) -> parser.parse(p, null).build(), TERMS_FIELD); + + PARSER.declareBoolean(MultiTermsAggregationBuilder::showTermDocCountError, SHOW_TERM_DOC_COUNT_ERROR); + + PARSER.declareInt(MultiTermsAggregationBuilder::shardSize, SHARD_SIZE_FIELD_NAME); + + PARSER.declareLong(MultiTermsAggregationBuilder::minDocCount, MIN_DOC_COUNT_FIELD_NAME); + + PARSER.declareLong(MultiTermsAggregationBuilder::shardMinDocCount, SHARD_MIN_DOC_COUNT_FIELD_NAME); + + PARSER.declareInt(MultiTermsAggregationBuilder::size, REQUIRED_SIZE_FIELD_NAME); + + PARSER.declareObjectArray(MultiTermsAggregationBuilder::order, (p, c) -> InternalOrder.Parser.parseOrderParam(p), ORDER_FIELD); + + PARSER.declareField( + MultiTermsAggregationBuilder::collectMode, + (p, c) -> Aggregator.SubAggCollectionMode.parse(p.text(), LoggingDeprecationHandler.INSTANCE), + Aggregator.SubAggCollectionMode.KEY, + ObjectParser.ValueType.STRING + ); + } + + public static final ValuesSourceRegistry.RegistryKey REGISTRY_KEY = + new ValuesSourceRegistry.RegistryKey<>( + MultiTermsAggregationBuilder.NAME, + MultiTermsAggregationFactory.InternalValuesSourceSupplier.class + ); + + private List terms; + + private BucketOrder order = BucketOrder.compound(BucketOrder.count(false)); // automatically adds tie-breaker key asc order + private Aggregator.SubAggCollectionMode collectMode = null; + private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds( + DEFAULT_BUCKET_COUNT_THRESHOLDS + ); + private boolean showTermDocCountError = false; + + public MultiTermsAggregationBuilder(String name) { + super(name); + } + + protected MultiTermsAggregationBuilder( + MultiTermsAggregationBuilder clone, + AggregatorFactories.Builder factoriesBuilder, + Map metadata + ) { + super(clone, factoriesBuilder, metadata); + this.terms = new ArrayList<>(clone.terms); + this.order = clone.order; + this.collectMode = clone.collectMode; + this.bucketCountThresholds = new TermsAggregator.BucketCountThresholds(clone.bucketCountThresholds); + this.showTermDocCountError = clone.showTermDocCountError; + } + + @Override + protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map metadata) { + return new MultiTermsAggregationBuilder(this, factoriesBuilder, metadata); + } + + /** + * Read from a stream. 
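+ * The read order must stay in sync with {@link #doWriteTo(StreamOutput)}.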
+ */ + public MultiTermsAggregationBuilder(StreamInput in) throws IOException { + super(in); + terms = in.readList(MultiTermsValuesSourceConfig::new); + bucketCountThresholds = new TermsAggregator.BucketCountThresholds(in); + collectMode = in.readOptionalWriteable(Aggregator.SubAggCollectionMode::readFromStream); + order = InternalOrder.Streams.readOrder(in); + showTermDocCountError = in.readBoolean(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + out.writeList(terms); + bucketCountThresholds.writeTo(out); + out.writeOptionalWriteable(collectMode); + order.writeTo(out); + out.writeBoolean(showTermDocCountError); + } + + @Override + protected AggregatorFactory doBuild( + QueryShardContext queryShardContext, + AggregatorFactory parent, + AggregatorFactories.Builder subfactoriesBuilder + ) throws IOException { + return new MultiTermsAggregationFactory( + name, + queryShardContext, + parent, + subfactoriesBuilder, + metadata, + terms, + order, + collectMode, + bucketCountThresholds, + showTermDocCountError + ); + } + + @Override + protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (terms != null) { + builder.field(TERMS_FIELD.getPreferredName(), terms); + } + bucketCountThresholds.toXContent(builder, params); + builder.field(SHOW_TERM_DOC_COUNT_ERROR.getPreferredName(), showTermDocCountError); + builder.field(ORDER_FIELD.getPreferredName()); + order.toXContent(builder, params); + if (collectMode != null) { + builder.field(Aggregator.SubAggCollectionMode.KEY.getPreferredName(), collectMode.parseField().getPreferredName()); + } + builder.endObject(); + return builder; + } + + /** + * Set the terms. + */ + public MultiTermsAggregationBuilder terms(List terms) { + if (terms == null) { + throw new IllegalArgumentException("[terms] must not be null. Found null terms in [" + name + "]"); + } + if (terms.size() < 2) { + throw new IllegalArgumentException( + "multi term aggregation must has at least 2 terms. Found [" + + terms.size() + + "] in" + + " [" + + name + + "]" + + (terms.size() == 1 ? " Use terms aggregation for single term aggregation" : "") + ); + } + this.terms = terms; + return this; + } + + /** + * Sets the size - indicating how many term buckets should be returned + * (defaults to 10) + */ + public MultiTermsAggregationBuilder size(int size) { + if (size <= 0) { + throw new IllegalArgumentException("[size] must be greater than 0. Found [" + size + "] in [" + name + "]"); + } + bucketCountThresholds.setRequiredSize(size); + return this; + } + + /** + * Returns the number of term buckets currently configured + */ + public int size() { + return bucketCountThresholds.getRequiredSize(); + } + + /** + * Sets the shard_size - indicating the number of term buckets each shard + * will return to the coordinating node (the node that coordinates the + * search execution). The higher the shard size is, the more accurate the + * results are. + */ + public MultiTermsAggregationBuilder shardSize(int shardSize) { + if (shardSize <= 0) { + throw new IllegalArgumentException("[shardSize] must be greater than 0. 
Found [" + shardSize + "] in [" + name + "]"); + } + bucketCountThresholds.setShardSize(shardSize); + return this; + } + + /** + * Returns the number of term buckets per shard that are currently configured + */ + public int shardSize() { + return bucketCountThresholds.getShardSize(); + } + + /** + * Set the minimum document count terms should have in order to appear in + * the response. + */ + public MultiTermsAggregationBuilder minDocCount(long minDocCount) { + if (minDocCount < 0) { + throw new IllegalArgumentException( + "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]" + ); + } + bucketCountThresholds.setMinDocCount(minDocCount); + return this; + } + + /** + * Returns the minimum document count required per term + */ + public long minDocCount() { + return bucketCountThresholds.getMinDocCount(); + } + + /** + * Set the minimum document count terms should have on the shard in order to + * appear in the response. + */ + public MultiTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) { + if (shardMinDocCount < 0) { + throw new IllegalArgumentException( + "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]" + ); + } + bucketCountThresholds.setShardMinDocCount(shardMinDocCount); + return this; + } + + /** + * Returns the minimum document count required per term, per shard + */ + public long shardMinDocCount() { + return bucketCountThresholds.getShardMinDocCount(); + } + + /** Set a new order on this builder and return the builder so that calls + * can be chained. A tie-breaker may be added to avoid non-deterministic ordering. */ + public MultiTermsAggregationBuilder order(BucketOrder order) { + if (order == null) { + throw new IllegalArgumentException("[order] must not be null: [" + name + "]"); + } + if (order instanceof InternalOrder.CompoundOrder || InternalOrder.isKeyOrder(order)) { + this.order = order; // if order already contains a tie-breaker we are good to go + } else { // otherwise add a tie-breaker by using a compound order + this.order = BucketOrder.compound(order); + } + return this; + } + + /** + * Sets the order in which the buckets will be returned. A tie-breaker may be added to avoid non-deterministic + * ordering. + */ + public MultiTermsAggregationBuilder order(List orders) { + if (orders == null) { + throw new IllegalArgumentException("[orders] must not be null: [" + name + "]"); + } + // if the list only contains one order use that to avoid inconsistent xcontent + order(orders.size() > 1 ? BucketOrder.compound(orders) : orders.get(0)); + return this; + } + + /** + * Gets the order in which the buckets will be returned. + */ + public BucketOrder order() { + return order; + } + + /** + * Expert: set the collection mode. + */ + public MultiTermsAggregationBuilder collectMode(Aggregator.SubAggCollectionMode collectMode) { + if (collectMode == null) { + throw new IllegalArgumentException("[collectMode] must not be null: [" + name + "]"); + } + this.collectMode = collectMode; + return this; + } + + /** + * Expert: get the collection mode. 
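+ * Returns {@code null} unless a mode was set explicitly via {@link #collectMode(Aggregator.SubAggCollectionMode)}.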
+     */
+    public Aggregator.SubAggCollectionMode collectMode() {
+        return collectMode;
+    }
+
+    /**
+     * Get whether the doc count error will be returned for individual terms.
+     */
+    public boolean showTermDocCountError() {
+        return showTermDocCountError;
+    }
+
+    /**
+     * Set whether the doc count error will be returned for individual terms.
+     */
+    public MultiTermsAggregationBuilder showTermDocCountError(boolean showTermDocCountError) {
+        this.showTermDocCountError = showTermDocCountError;
+        return this;
+    }
+
+    @Override
+    public BucketCardinality bucketCardinality() {
+        return BucketCardinality.MANY;
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(super.hashCode(), bucketCountThresholds, collectMode, order, showTermDocCountError);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) return true;
+        if (obj == null || getClass() != obj.getClass()) return false;
+        if (super.equals(obj) == false) return false;
+        MultiTermsAggregationBuilder other = (MultiTermsAggregationBuilder) obj;
+        return Objects.equals(terms, other.terms)
+            && Objects.equals(bucketCountThresholds, other.bucketCountThresholds)
+            && Objects.equals(collectMode, other.collectMode)
+            && Objects.equals(order, other.order)
+            && Objects.equals(showTermDocCountError, other.showTermDocCountError);
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationFactory.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationFactory.java
new file mode 100644
index 0000000000000..d5600bc030bf2
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationFactory.java
@@ -0,0 +1,163 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.aggregations.bucket.terms;
+
+import org.opensearch.common.collect.Tuple;
+import org.opensearch.index.query.QueryShardContext;
+import org.opensearch.search.DocValueFormat;
+import org.opensearch.search.aggregations.Aggregator;
+import org.opensearch.search.aggregations.AggregatorFactories;
+import org.opensearch.search.aggregations.AggregatorFactory;
+import org.opensearch.search.aggregations.BucketOrder;
+import org.opensearch.search.aggregations.CardinalityUpperBound;
+import org.opensearch.search.aggregations.InternalOrder;
+import org.opensearch.search.aggregations.bucket.BucketUtils;
+import org.opensearch.search.aggregations.support.CoreValuesSourceType;
+import org.opensearch.search.aggregations.support.MultiTermsValuesSourceConfig;
+import org.opensearch.search.aggregations.support.ValuesSource;
+import org.opensearch.search.aggregations.support.ValuesSourceConfig;
+import org.opensearch.search.aggregations.support.ValuesSourceRegistry;
+import org.opensearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static org.opensearch.search.aggregations.bucket.terms.MultiTermsAggregationBuilder.REGISTRY_KEY;
+
+/**
+ * Factory of {@link MultiTermsAggregator}.
+ */
+public class MultiTermsAggregationFactory extends AggregatorFactory {
+
+    private final List<Tuple<ValuesSourceConfig, IncludeExclude>> configs;
+    private final List<DocValueFormat> formats;
+    /**
+     * Fields inherited from the terms aggregation factory.
+ */ + private final BucketOrder order; + private final Aggregator.SubAggCollectionMode collectMode; + private final TermsAggregator.BucketCountThresholds bucketCountThresholds; + private final boolean showTermDocCountError; + + public static void registerAggregators(ValuesSourceRegistry.Builder builder) { + builder.register( + REGISTRY_KEY, + org.opensearch.common.collect.List.of(CoreValuesSourceType.BYTES, CoreValuesSourceType.IP), + config -> { + final IncludeExclude.StringFilter filter = config.v2() == null + ? null + : config.v2().convertToStringFilter(config.v1().format()); + return MultiTermsAggregator.InternalValuesSourceFactory.bytesValuesSource(config.v1().getValuesSource(), filter); + }, + true + ); + + builder.register( + REGISTRY_KEY, + org.opensearch.common.collect.List.of(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BOOLEAN, CoreValuesSourceType.DATE), + config -> { + ValuesSourceConfig valuesSourceConfig = config.v1(); + IncludeExclude includeExclude = config.v2(); + ValuesSource.Numeric valuesSource = ((ValuesSource.Numeric) valuesSourceConfig.getValuesSource()); + IncludeExclude.LongFilter longFilter = null; + if (valuesSource.isFloatingPoint()) { + if (includeExclude != null) { + longFilter = includeExclude.convertToDoubleFilter(); + } + return MultiTermsAggregator.InternalValuesSourceFactory.doubleValueSource(valuesSource, longFilter); + } else { + if (includeExclude != null) { + longFilter = includeExclude.convertToLongFilter(valuesSourceConfig.format()); + } + return MultiTermsAggregator.InternalValuesSourceFactory.longValuesSource(valuesSource, longFilter); + } + }, + true + ); + + builder.registerUsage(MultiTermsAggregationBuilder.NAME); + } + + public MultiTermsAggregationFactory( + String name, + QueryShardContext queryShardContext, + AggregatorFactory parent, + AggregatorFactories.Builder subFactoriesBuilder, + Map metadata, + List multiTermConfigs, + BucketOrder order, + Aggregator.SubAggCollectionMode collectMode, + TermsAggregator.BucketCountThresholds bucketCountThresholds, + boolean showTermDocCountError + ) throws IOException { + super(name, queryShardContext, parent, subFactoriesBuilder, metadata); + this.configs = multiTermConfigs.stream() + .map( + c -> new Tuple( + ValuesSourceConfig.resolveUnregistered( + queryShardContext, + c.getUserValueTypeHint(), + c.getFieldName(), + c.getScript(), + c.getMissing(), + c.getTimeZone(), + c.getFormat(), + CoreValuesSourceType.BYTES + ), + c.getIncludeExclude() + ) + ) + .collect(Collectors.toList()); + this.formats = this.configs.stream().map(c -> c.v1().format()).collect(Collectors.toList()); + this.order = order; + this.collectMode = collectMode; + this.bucketCountThresholds = bucketCountThresholds; + this.showTermDocCountError = showTermDocCountError; + } + + @Override + protected Aggregator createInternal( + SearchContext searchContext, + Aggregator parent, + CardinalityUpperBound cardinality, + Map metadata + ) throws IOException { + TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(this.bucketCountThresholds); + if (InternalOrder.isKeyOrder(order) == false + && bucketCountThresholds.getShardSize() == TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) { + // The user has not made a shardSize selection. 
Use default
+            // heuristic to avoid any wrong-ranking caused by distributed
+            // counting
+            bucketCountThresholds.setShardSize(BucketUtils.suggestShardSideQueueSize(bucketCountThresholds.getRequiredSize()));
+        }
+        bucketCountThresholds.ensureValidity();
+        return new MultiTermsAggregator(
+            name,
+            factories,
+            showTermDocCountError,
+            configs.stream()
+                .map(config -> queryShardContext.getValuesSourceRegistry().getAggregator(REGISTRY_KEY, config.v1()).build(config))
+                .collect(Collectors.toList()),
+            configs.stream().map(c -> c.v1().format()).collect(Collectors.toList()),
+            order,
+            collectMode,
+            bucketCountThresholds,
+            searchContext,
+            parent,
+            cardinality,
+            metadata
+        );
+    }
+
+    public interface InternalValuesSourceSupplier {
+        MultiTermsAggregator.InternalValuesSource build(Tuple<ValuesSourceConfig, IncludeExclude> config);
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregator.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregator.java
new file mode 100644
index 0000000000000..36bf710f74398
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregator.java
@@ -0,0 +1,438 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.aggregations.bucket.terms;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SortedNumericDocValues;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.NumericUtils;
+import org.apache.lucene.util.PriorityQueue;
+import org.opensearch.ExceptionsHelper;
+import org.opensearch.common.CheckedSupplier;
+import org.opensearch.common.bytes.BytesArray;
+import org.opensearch.common.io.stream.BytesStreamOutput;
+import org.opensearch.common.io.stream.StreamInput;
+import org.opensearch.common.io.stream.StreamOutput;
+import org.opensearch.common.lease.Releasables;
+import org.opensearch.index.fielddata.SortedBinaryDocValues;
+import org.opensearch.index.fielddata.SortedNumericDoubleValues;
+import org.opensearch.search.DocValueFormat;
+import org.opensearch.search.aggregations.Aggregator;
+import org.opensearch.search.aggregations.AggregatorFactories;
+import org.opensearch.search.aggregations.BucketOrder;
+import org.opensearch.search.aggregations.CardinalityUpperBound;
+import org.opensearch.search.aggregations.InternalAggregation;
+import org.opensearch.search.aggregations.InternalOrder;
+import org.opensearch.search.aggregations.LeafBucketCollector;
+import org.opensearch.search.aggregations.bucket.DeferableBucketAggregator;
+import org.opensearch.search.aggregations.support.AggregationPath;
+import org.opensearch.search.aggregations.support.ValuesSource;
+import org.opensearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import static org.opensearch.search.aggregations.InternalOrder.isKeyOrder;
+import static org.opensearch.search.aggregations.bucket.terms.TermsAggregator.descendsFromNestedAggregator;
+
+/**
+ * An aggregator that aggregates with multi_terms.
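+ * <p>
+ * The request shape this aggregator serves looks roughly as follows (field
+ * names are illustrative only):
+ * <pre>
+ * "aggs": {
+ *   "genres_and_products": {
+ *     "multi_terms": {
+ *       "terms": [{ "field": "genre" }, { "field": "product" }]
+ *     }
+ *   }
+ * }
+ * </pre>
+ * Each bucket key is one combination of values, one value per configured term.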
+ */
+public class MultiTermsAggregator extends DeferableBucketAggregator {
+
+    private final BytesKeyedBucketOrds bucketOrds;
+    private final MultiTermsValuesSource multiTermsValue;
+    private final boolean showTermDocCountError;
+    private final List<DocValueFormat> formats;
+    private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
+    private final BucketOrder order;
+    private final Comparator<InternalMultiTerms.Bucket> partiallyBuiltBucketComparator;
+    private final SubAggCollectionMode collectMode;
+    private final Set<Aggregator> aggsUsedForSorting = new HashSet<>();
+
+    public MultiTermsAggregator(
+        String name,
+        AggregatorFactories factories,
+        boolean showTermDocCountError,
+        List<InternalValuesSource> internalValuesSources,
+        List<DocValueFormat> formats,
+        BucketOrder order,
+        SubAggCollectionMode collectMode,
+        TermsAggregator.BucketCountThresholds bucketCountThresholds,
+        SearchContext context,
+        Aggregator parent,
+        CardinalityUpperBound cardinality,
+        Map<String, Object> metadata
+    ) throws IOException {
+        super(name, factories, context, parent, metadata);
+        this.bucketOrds = BytesKeyedBucketOrds.build(context.bigArrays(), cardinality);
+        this.multiTermsValue = new MultiTermsValuesSource(internalValuesSources);
+        this.showTermDocCountError = showTermDocCountError;
+        this.formats = formats;
+        this.bucketCountThresholds = bucketCountThresholds;
+        this.order = order;
+        this.partiallyBuiltBucketComparator = order == null ? null : order.partiallyBuiltBucketComparator(b -> b.bucketOrd, this);
+        // TODO: copied from TermsAggregator; the duplicated code should be removed.
+        if (subAggsNeedScore() && descendsFromNestedAggregator(parent)) {
+            /**
+             * Force the execution to depth_first because we need to access the score of
+             * nested documents in a sub-aggregation and we are not able to generate this score
+             * while replaying deferred documents.
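+             * (Deferred documents are replayed from a cache that does not
+             * retain scores, hence the eager collection here.)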
+ */ + this.collectMode = SubAggCollectionMode.DEPTH_FIRST; + } else { + this.collectMode = collectMode; + } + // Don't defer any child agg if we are dependent on it for pruning results + if (order instanceof InternalOrder.Aggregation) { + AggregationPath path = ((InternalOrder.Aggregation) order).path(); + aggsUsedForSorting.add(path.resolveTopmostAggregator(this)); + } else if (order instanceof InternalOrder.CompoundOrder) { + InternalOrder.CompoundOrder compoundOrder = (InternalOrder.CompoundOrder) order; + for (BucketOrder orderElement : compoundOrder.orderElements()) { + if (orderElement instanceof InternalOrder.Aggregation) { + AggregationPath path = ((InternalOrder.Aggregation) orderElement).path(); + aggsUsedForSorting.add(path.resolveTopmostAggregator(this)); + } + } + } + } + + @Override + public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { + InternalMultiTerms.Bucket[][] topBucketsPerOrd = new InternalMultiTerms.Bucket[owningBucketOrds.length][]; + long[] otherDocCounts = new long[owningBucketOrds.length]; + for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + collectZeroDocEntriesIfNeeded(owningBucketOrds[ordIdx]); + long bucketsInOrd = bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]); + + int size = (int) Math.min(bucketsInOrd, bucketCountThresholds.getShardSize()); + PriorityQueue ordered = new BucketPriorityQueue<>(size, partiallyBuiltBucketComparator); + InternalMultiTerms.Bucket spare = null; + BytesRef dest = null; + BytesKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); + CheckedSupplier emptyBucketBuilder = () -> InternalMultiTerms.Bucket.EMPTY( + showTermDocCountError, + formats + ); + while (ordsEnum.next()) { + long docCount = bucketDocCount(ordsEnum.ord()); + otherDocCounts[ordIdx] += docCount; + if (docCount < bucketCountThresholds.getShardMinDocCount()) { + continue; + } + if (spare == null) { + spare = emptyBucketBuilder.get(); + dest = new BytesRef(); + } + + ordsEnum.readValue(dest); + + spare.termValues = decode(dest); + spare.docCount = docCount; + spare.bucketOrd = ordsEnum.ord(); + spare = ordered.insertWithOverflow(spare); + } + + // Get the top buckets + InternalMultiTerms.Bucket[] bucketsForOrd = new InternalMultiTerms.Bucket[ordered.size()]; + topBucketsPerOrd[ordIdx] = bucketsForOrd; + for (int b = ordered.size() - 1; b >= 0; --b) { + topBucketsPerOrd[ordIdx][b] = ordered.pop(); + otherDocCounts[ordIdx] -= topBucketsPerOrd[ordIdx][b].getDocCount(); + } + } + + buildSubAggsForAllBuckets(topBucketsPerOrd, b -> b.bucketOrd, (b, aggs) -> b.aggregations = aggs); + + InternalAggregation[] result = new InternalAggregation[owningBucketOrds.length]; + for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { + result[ordIdx] = buildResult(owningBucketOrds[ordIdx], otherDocCounts[ordIdx], topBucketsPerOrd[ordIdx]); + } + return result; + } + + InternalMultiTerms buildResult(long owningBucketOrd, long otherDocCount, InternalMultiTerms.Bucket[] topBuckets) { + BucketOrder reduceOrder; + if (isKeyOrder(order) == false) { + reduceOrder = InternalOrder.key(true); + Arrays.sort(topBuckets, reduceOrder.comparator()); + } else { + reduceOrder = order; + } + return new InternalMultiTerms( + name, + reduceOrder, + order, + bucketCountThresholds.getRequiredSize(), + bucketCountThresholds.getMinDocCount(), + metadata(), + bucketCountThresholds.getShardSize(), + showTermDocCountError, + otherDocCount, + 0, + formats, + org.opensearch.common.collect.List.of(topBuckets) + 
);
+    }
+
+    @Override
+    public InternalAggregation buildEmptyAggregation() {
+        return null;
+    }
+
+    @Override
+    protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
+        MultiTermsValuesSourceCollector collector = multiTermsValue.getValues(ctx);
+        return new LeafBucketCollector() {
+            @Override
+            public void collect(int doc, long owningBucketOrd) throws IOException {
+                for (List<Object> value : collector.apply(doc)) {
+                    long bucketOrd = bucketOrds.add(owningBucketOrd, encode(value));
+                    if (bucketOrd < 0) {
+                        bucketOrd = -1 - bucketOrd;
+                        collectExistingBucket(sub, doc, bucketOrd);
+                    } else {
+                        collectBucket(sub, doc, bucketOrd);
+                    }
+                }
+            }
+        };
+    }
+
+    @Override
+    protected void doClose() {
+        Releasables.close(bucketOrds);
+    }
+
+    private static BytesRef encode(List<Object> values) {
+        try (BytesStreamOutput output = new BytesStreamOutput()) {
+            output.writeCollection(values, StreamOutput::writeGenericValue);
+            return output.bytes().toBytesRef();
+        } catch (IOException e) {
+            throw ExceptionsHelper.convertToRuntime(e);
+        }
+    }
+
+    private static List<Object> decode(BytesRef bytesRef) {
+        try (StreamInput input = new BytesArray(bytesRef).streamInput()) {
+            return input.readList(StreamInput::readGenericValue);
+        } catch (IOException e) {
+            throw ExceptionsHelper.convertToRuntime(e);
+        }
+    }
+
+    private boolean subAggsNeedScore() {
+        for (Aggregator subAgg : subAggregators) {
+            if (subAgg.scoreMode().needsScores()) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    @Override
+    protected boolean shouldDefer(Aggregator aggregator) {
+        return collectMode == Aggregator.SubAggCollectionMode.BREADTH_FIRST && !aggsUsedForSorting.contains(aggregator);
+    }
+
+    private void collectZeroDocEntriesIfNeeded(long owningBucketOrd) throws IOException {
+        if (bucketCountThresholds.getMinDocCount() != 0) {
+            return;
+        }
+        if (InternalOrder.isCountDesc(order) && bucketOrds.bucketsInOrd(owningBucketOrd) >= bucketCountThresholds.getRequiredSize()) {
+            return;
+        }
+        // we need to fill-in the blanks
+        for (LeafReaderContext ctx : context.searcher().getTopReaderContext().leaves()) {
+            MultiTermsValuesSourceCollector collector = multiTermsValue.getValues(ctx);
+            // brute force
+            for (int docId = 0; docId < ctx.reader().maxDoc(); ++docId) {
+                for (List<Object> value : collector.apply(docId)) {
+                    bucketOrds.add(owningBucketOrd, encode(value));
+                }
+            }
+        }
+    }
+
+    /**
+     * A multi_terms collector which collects values for each doc.
+     */
+    @FunctionalInterface
+    interface MultiTermsValuesSourceCollector {
+        /**
+         * Collect a list of multi_terms values for each doc.
+         * Each term could have multiple values, so the result is the Cartesian product of each term's values.
+         */
+        List<List<Object>> apply(int doc) throws IOException;
+    }
+
+    @FunctionalInterface
+    interface InternalValuesSource {
+        /**
+         * Create an {@link InternalValuesSourceCollector} from an existing {@link LeafReaderContext}.
+         */
+        InternalValuesSourceCollector apply(LeafReaderContext ctx) throws IOException;
+    }
+
+    /**
+     * A terms collector which collects values for each doc.
+     */
+    @FunctionalInterface
+    interface InternalValuesSourceCollector {
+        /**
+         * Collect the list of values of a term for a specific doc.
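+         * Implementations are expected to drop duplicate values and values
+         * rejected by an include/exclude filter, so a doc with values
+         * [a, a, b] yields [a, b].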
+         */
+        List<Object> apply(int doc) throws IOException;
+    }
+
+    /**
+     * The multi_terms values source: a collection of {@link InternalValuesSource}.
+     */
+    static class MultiTermsValuesSource {
+        private final List<InternalValuesSource> valuesSources;
+
+        public MultiTermsValuesSource(List<InternalValuesSource> valuesSources) {
+            this.valuesSources = valuesSources;
+        }
+
+        public MultiTermsValuesSourceCollector getValues(LeafReaderContext ctx) throws IOException {
+            List<InternalValuesSourceCollector> collectors = new ArrayList<>();
+            for (InternalValuesSource valuesSource : valuesSources) {
+                collectors.add(valuesSource.apply(ctx));
+            }
+            return new MultiTermsValuesSourceCollector() {
+                @Override
+                public List<List<Object>> apply(int doc) throws IOException {
+                    List<CheckedSupplier<List<Object>, IOException>> collectedValues = new ArrayList<>();
+                    for (InternalValuesSourceCollector collector : collectors) {
+                        collectedValues.add(() -> collector.apply(doc));
+                    }
+                    List<List<Object>> result = new ArrayList<>();
+                    apply(0, collectedValues, new ArrayList<>(), result);
+                    return result;
+                }
+
+                /**
+                 * DFS-traverse each term's values and add the Cartesian products to the results list.
+                 */
+                private void apply(
+                    int index,
+                    List<CheckedSupplier<List<Object>, IOException>> collectedValues,
+                    List<Object> current,
+                    List<List<Object>> results
+                ) throws IOException {
+                    if (index == collectedValues.size()) {
+                        results.add(org.opensearch.common.collect.List.copyOf(current));
+                    } else if (null != collectedValues.get(index)) {
+                        for (Object value : collectedValues.get(index).get()) {
+                            current.add(value);
+                            apply(index + 1, collectedValues, current, results);
+                            current.remove(current.size() - 1);
+                        }
+                    }
+                }
+            };
+        }
+    }
+
+    /**
+     * Factory for constructing {@link InternalValuesSource} instances.
+     */
+    static class InternalValuesSourceFactory {
+        static InternalValuesSource bytesValuesSource(ValuesSource valuesSource, IncludeExclude.StringFilter includeExclude) {
+            return ctx -> {
+                SortedBinaryDocValues values = valuesSource.bytesValues(ctx);
+                return doc -> {
+                    BytesRefBuilder previous = new BytesRefBuilder();
+
+                    if (false == values.advanceExact(doc)) {
+                        return Collections.emptyList();
+                    }
+                    int valuesCount = values.docValueCount();
+                    List<Object> termValues = new ArrayList<>(valuesCount);
+
+                    // SortedBinaryDocValues don't guarantee uniqueness so we
+                    // need to take care of dups
+                    previous.clear();
+                    for (int i = 0; i < valuesCount; ++i) {
+                        BytesRef bytes = values.nextValue();
+                        if (includeExclude != null && false == includeExclude.accept(bytes)) {
+                            continue;
+                        }
+                        if (i > 0 && previous.get().equals(bytes)) {
+                            continue;
+                        }
+                        previous.copyBytes(bytes);
+                        termValues.add(BytesRef.deepCopyOf(bytes));
+                    }
+                    return termValues;
+                };
+            };
+        }
+
+        static InternalValuesSource longValuesSource(ValuesSource.Numeric valuesSource, IncludeExclude.LongFilter longFilter) {
+            return ctx -> {
+                SortedNumericDocValues values = valuesSource.longValues(ctx);
+                return doc -> {
+                    if (values.advanceExact(doc)) {
+                        int valuesCount = values.docValueCount();
+
+                        long previous = Long.MAX_VALUE;
+                        List<Object> termValues = new ArrayList<>(valuesCount);
+                        for (int i = 0; i < valuesCount; ++i) {
+                            long val = values.nextValue();
+                            if (previous != val || i == 0) {
+                                if (longFilter == null || longFilter.accept(val)) {
+                                    termValues.add(val);
+                                }
+                                previous = val;
+                            }
+                        }
+                        return termValues;
+                    }
+                    return Collections.emptyList();
+                };
+            };
+        }
+
+        static InternalValuesSource doubleValueSource(ValuesSource.Numeric valuesSource, IncludeExclude.LongFilter longFilter) {
+            return ctx -> {
+                SortedNumericDoubleValues values = valuesSource.doubleValues(ctx);
+                return doc -> {
+                    if (values.advanceExact(doc)) {
+                        int valuesCount = values.docValueCount();
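+                        // a note on the loop below: sorted numeric doc values come back in
+                        // sorted order, so comparing against the previous value is enough
+                        // to drop consecutive duplicates.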
+ + double previous = Double.MAX_VALUE; + List termValues = new ArrayList<>(valuesCount); + for (int i = 0; i < valuesCount; ++i) { + double val = values.nextValue(); + if (previous != val || i == 0) { + if (longFilter == null || longFilter.accept(NumericUtils.doubleToSortableLong(val))) { + termValues.add(val); + } + previous = val; + } + } + return termValues; + } + return Collections.emptyList(); + }; + }; + } + } +} diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedMultiTerms.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedMultiTerms.java new file mode 100644 index 0000000000000..8686d329fa3b2 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedMultiTerms.java @@ -0,0 +1,77 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.aggregations.bucket.terms; + +import org.opensearch.common.xcontent.ObjectParser; +import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; + +public class ParsedMultiTerms extends ParsedTerms { + @Override + public String getType() { + return MultiTermsAggregationBuilder.NAME; + } + + private static final ObjectParser PARSER = new ObjectParser<>( + ParsedMultiTerms.class.getSimpleName(), + true, + ParsedMultiTerms::new + ); + static { + declareParsedTermsFields(PARSER, ParsedBucket::fromXContent); + } + + public static ParsedMultiTerms fromXContent(XContentParser parser, String name) throws IOException { + ParsedMultiTerms aggregation = PARSER.parse(parser, null); + aggregation.setName(name); + return aggregation; + } + + public static class ParsedBucket extends ParsedTerms.ParsedBucket { + + private List key; + + @Override + public List getKey() { + return key; + } + + @Override + public String getKeyAsString() { + String keyAsString = super.getKeyAsString(); + if (keyAsString != null) { + return keyAsString; + } + if (key != null) { + return key.toString(); + } + return null; + } + + public Number getKeyAsNumber() { + throw new UnsupportedOperationException("not implemented"); + } + + @Override + protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { + builder.field(CommonFields.KEY.getPreferredName(), key); + if (super.getKeyAsString() != null) { + builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString()); + } + return builder; + } + + static ParsedBucket fromXContent(XContentParser parser) throws IOException { + return parseTermsBucketXContent(parser, ParsedBucket::new, (p, bucket) -> { bucket.key = p.list(); }); + } + } +} diff --git a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedTerms.java b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedTerms.java index ce5f56c898fa6..054ea7d827053 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedTerms.java +++ b/server/src/main/java/org/opensearch/search/aggregations/bucket/terms/ParsedTerms.java @@ -139,13 +139,16 @@ static B parseTermsBucketXContent( XContentParser.Token token; String currentFieldName = parser.currentName(); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + // field value could be list, e.g. multi_terms aggregation. 
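+            // e.g. "key": ["rock", "electronic"] for multi_terms vs. "key": "rock" for terms,
+            // so the key must be consumed here rather than in the isValue()-only branch below.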
+            if ((token.isValue() || token == XContentParser.Token.START_ARRAY)
+                && CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
+                keyConsumer.accept(parser, bucket);
+            }
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
             } else if (token.isValue()) {
                 if (CommonFields.KEY_AS_STRING.getPreferredName().equals(currentFieldName)) {
                     bucket.setKeyAsString(parser.text());
-                } else if (CommonFields.KEY.getPreferredName().equals(currentFieldName)) {
-                    keyConsumer.accept(parser, bucket);
                 } else if (CommonFields.DOC_COUNT.getPreferredName().equals(currentFieldName)) {
                     bucket.setDocCount(parser.longValue());
                 } else if (DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName().equals(currentFieldName)) {
diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/BaseMultiValuesSourceFieldConfig.java b/server/src/main/java/org/opensearch/search/aggregations/support/BaseMultiValuesSourceFieldConfig.java
new file mode 100644
index 0000000000000..c75ab861439d3
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/aggregations/support/BaseMultiValuesSourceFieldConfig.java
@@ -0,0 +1,216 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.aggregations.support;
+
+import org.opensearch.LegacyESVersion;
+import org.opensearch.common.ParseField;
+import org.opensearch.common.Strings;
+import org.opensearch.common.TriConsumer;
+import org.opensearch.common.io.stream.StreamInput;
+import org.opensearch.common.io.stream.StreamOutput;
+import org.opensearch.common.io.stream.Writeable;
+import org.opensearch.common.time.DateUtils;
+import org.opensearch.common.xcontent.ObjectParser;
+import org.opensearch.common.xcontent.ToXContentObject;
+import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.common.xcontent.XContentParser;
+import org.opensearch.script.Script;
+
+import java.io.IOException;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.util.Objects;
+
+/**
+ * A configuration that tells an aggregation how to retrieve data from the index
+ * in order to run a specific aggregation.
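+ * <p>
+ * Concrete subclasses add their own settings on top of the shared
+ * field/missing/script/time zone handling serialized here: for example,
+ * {@code MultiValuesSourceFieldConfig} adds a filter, and
+ * {@code MultiTermsValuesSourceConfig} adds a value type hint, a format and
+ * include/exclude support.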
+ */ +public abstract class BaseMultiValuesSourceFieldConfig implements Writeable, ToXContentObject { + private final String fieldName; + private final Object missing; + private final Script script; + private final ZoneId timeZone; + + static TriConsumer< + ObjectParser>, Void>, + Boolean, + Boolean> PARSER = (parser, scriptable, timezoneAware) -> { + parser.declareString(Builder::setFieldName, ParseField.CommonFields.FIELD); + parser.declareField( + Builder::setMissing, + XContentParser::objectText, + ParseField.CommonFields.MISSING, + ObjectParser.ValueType.VALUE + ); + + if (scriptable) { + parser.declareField( + Builder::setScript, + (p, context) -> Script.parse(p), + Script.SCRIPT_PARSE_FIELD, + ObjectParser.ValueType.OBJECT_OR_STRING + ); + } + + if (timezoneAware) { + parser.declareField(Builder::setTimeZone, p -> { + if (p.currentToken() == XContentParser.Token.VALUE_STRING) { + return ZoneId.of(p.text()); + } else { + return ZoneOffset.ofHours(p.intValue()); + } + }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG); + } + }; + + public BaseMultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, ZoneId timeZone) { + this.fieldName = fieldName; + this.missing = missing; + this.script = script; + this.timeZone = timeZone; + } + + public BaseMultiValuesSourceFieldConfig(StreamInput in) throws IOException { + if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { + this.fieldName = in.readOptionalString(); + } else { + this.fieldName = in.readString(); + } + this.missing = in.readGenericValue(); + this.script = in.readOptionalWriteable(Script::new); + if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { + this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone()); + } else { + this.timeZone = in.readOptionalZoneId(); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { + out.writeOptionalString(fieldName); + } else { + out.writeString(fieldName); + } + out.writeGenericValue(missing); + out.writeOptionalWriteable(script); + if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { + out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone)); + } else { + out.writeOptionalZoneId(timeZone); + } + doWriteTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (missing != null) { + builder.field(ParseField.CommonFields.MISSING.getPreferredName(), missing); + } + if (script != null) { + builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script); + } + if (fieldName != null) { + builder.field(ParseField.CommonFields.FIELD.getPreferredName(), fieldName); + } + if (timeZone != null) { + builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getId()); + } + doXContentBody(builder, params); + builder.endObject(); + return builder; + } + + public Object getMissing() { + return missing; + } + + public Script getScript() { + return script; + } + + public ZoneId getTimeZone() { + return timeZone; + } + + public String getFieldName() { + return fieldName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + BaseMultiValuesSourceFieldConfig that = (BaseMultiValuesSourceFieldConfig) o; + return Objects.equals(fieldName, that.fieldName) + && Objects.equals(missing, that.missing) + && Objects.equals(script, that.script) + && 
Objects.equals(timeZone, that.timeZone);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(fieldName, missing, script, timeZone);
+    }
+
+    @Override
+    public String toString() {
+        return Strings.toString(this);
+    }
+
+    abstract void doXContentBody(XContentBuilder builder, Params params) throws IOException;
+
+    abstract void doWriteTo(StreamOutput out) throws IOException;
+
+    public abstract static class Builder<C extends BaseMultiValuesSourceFieldConfig, B extends Builder<C, B>> {
+        String fieldName;
+        Object missing = null;
+        Script script = null;
+        ZoneId timeZone = null;
+
+        public String getFieldName() {
+            return fieldName;
+        }
+
+        public B setFieldName(String fieldName) {
+            this.fieldName = fieldName;
+            return (B) this;
+        }
+
+        public Object getMissing() {
+            return missing;
+        }
+
+        public B setMissing(Object missing) {
+            this.missing = missing;
+            return (B) this;
+        }
+
+        public Script getScript() {
+            return script;
+        }
+
+        public B setScript(Script script) {
+            this.script = script;
+            return (B) this;
+        }
+
+        public ZoneId getTimeZone() {
+            return timeZone;
+        }
+
+        public B setTimeZone(ZoneId timeZone) {
+            this.timeZone = timeZone;
+            return (B) this;
+        }
+
+        abstract public C build();
+    }
+}
diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfig.java b/server/src/main/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfig.java
new file mode 100644
index 0000000000000..3bc7f444c610d
--- /dev/null
+++ b/server/src/main/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfig.java
@@ -0,0 +1,203 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.search.aggregations.support;
+
+import org.opensearch.common.ParseField;
+import org.opensearch.common.Strings;
+import org.opensearch.common.io.stream.StreamInput;
+import org.opensearch.common.io.stream.StreamOutput;
+import org.opensearch.common.xcontent.ObjectParser;
+import org.opensearch.common.xcontent.XContentBuilder;
+import org.opensearch.common.xcontent.XContentParser;
+import org.opensearch.script.Script;
+import org.opensearch.search.aggregations.AggregationBuilder;
+import org.opensearch.search.aggregations.bucket.terms.IncludeExclude;
+
+import java.io.IOException;
+import java.time.ZoneId;
+import java.util.Objects;
+
+/**
+ * A configuration that is used by multi_terms aggregations.
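+ * <p>
+ * One instance corresponds to a single entry of the {@code terms} array of a
+ * multi_terms request, e.g. (field name and missing value are illustrative):
+ * <pre>
+ * { "field": "product", "missing": "N/A" }
+ * </pre>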
+ */ +public class MultiTermsValuesSourceConfig extends BaseMultiValuesSourceFieldConfig { + private final ValueType userValueTypeHint; + private final String format; + private final IncludeExclude includeExclude; + + private static final String NAME = "field_config"; + public static final ParseField FILTER = new ParseField("filter"); + + public interface ParserSupplier { + ObjectParser apply( + Boolean scriptable, + Boolean timezoneAware, + Boolean valueTypeHinted, + Boolean formatted + ); + } + + public static final MultiTermsValuesSourceConfig.ParserSupplier PARSER = (scriptable, timezoneAware, valueTypeHinted, formatted) -> { + + ObjectParser parser = new ObjectParser<>( + MultiTermsValuesSourceConfig.NAME, + MultiTermsValuesSourceConfig.Builder::new + ); + + BaseMultiValuesSourceFieldConfig.PARSER.apply(parser, scriptable, timezoneAware); + + if (valueTypeHinted) { + parser.declareField( + MultiTermsValuesSourceConfig.Builder::setUserValueTypeHint, + p -> ValueType.lenientParse(p.text()), + ValueType.VALUE_TYPE, + ObjectParser.ValueType.STRING + ); + } + + if (formatted) { + parser.declareField( + MultiTermsValuesSourceConfig.Builder::setFormat, + XContentParser::text, + ParseField.CommonFields.FORMAT, + ObjectParser.ValueType.STRING + ); + } + + parser.declareField( + (b, v) -> b.setIncludeExclude(IncludeExclude.merge(b.getIncludeExclude(), v)), + IncludeExclude::parseExclude, + IncludeExclude.EXCLUDE_FIELD, + ObjectParser.ValueType.STRING_ARRAY + ); + + return parser; + }; + + protected MultiTermsValuesSourceConfig( + String fieldName, + Object missing, + Script script, + ZoneId timeZone, + ValueType userValueTypeHint, + String format, + IncludeExclude includeExclude + ) { + super(fieldName, missing, script, timeZone); + this.userValueTypeHint = userValueTypeHint; + this.format = format; + this.includeExclude = includeExclude; + } + + public MultiTermsValuesSourceConfig(StreamInput in) throws IOException { + super(in); + this.userValueTypeHint = in.readOptionalWriteable(ValueType::readFromStream); + this.format = in.readOptionalString(); + this.includeExclude = in.readOptionalWriteable(IncludeExclude::new); + } + + public ValueType getUserValueTypeHint() { + return userValueTypeHint; + } + + public String getFormat() { + return format; + } + + /** + * Get terms to include and exclude from the aggregation results + */ + public IncludeExclude getIncludeExclude() { + return includeExclude; + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + out.writeOptionalWriteable(userValueTypeHint); + out.writeOptionalString(format); + out.writeOptionalWriteable(includeExclude); + } + + @Override + public void doXContentBody(XContentBuilder builder, Params params) throws IOException { + if (userValueTypeHint != null) { + builder.field(AggregationBuilder.CommonFields.VALUE_TYPE.getPreferredName(), userValueTypeHint.getPreferredName()); + } + if (format != null) { + builder.field(AggregationBuilder.CommonFields.FORMAT.getPreferredName(), format); + } + if (includeExclude != null) { + includeExclude.toXContent(builder, params); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + if (super.equals(o) == false) return false; + + MultiTermsValuesSourceConfig that = (MultiTermsValuesSourceConfig) o; + return Objects.equals(userValueTypeHint, that.userValueTypeHint) + && Objects.equals(format, that.format) + && Objects.equals(includeExclude, that.includeExclude); + } + + 
@Override + public int hashCode() { + return Objects.hash(super.hashCode(), userValueTypeHint, format, includeExclude); + } + + public static class Builder extends BaseMultiValuesSourceFieldConfig.Builder { + private ValueType userValueTypeHint = null; + private String format; + private IncludeExclude includeExclude = null; + + public IncludeExclude getIncludeExclude() { + return includeExclude; + } + + public Builder setIncludeExclude(IncludeExclude includeExclude) { + this.includeExclude = includeExclude; + return this; + } + + public ValueType getUserValueTypeHint() { + return userValueTypeHint; + } + + public Builder setUserValueTypeHint(ValueType userValueTypeHint) { + this.userValueTypeHint = userValueTypeHint; + return this; + } + + public String getFormat() { + return format; + } + + public Builder setFormat(String format) { + this.format = format; + return this; + } + + public MultiTermsValuesSourceConfig build() { + if (Strings.isNullOrEmpty(fieldName) && script == null) { + throw new IllegalArgumentException( + "[" + + ParseField.CommonFields.FIELD.getPreferredName() + + "] and [" + + Script.SCRIPT_PARSE_FIELD.getPreferredName() + + "] cannot both be null. " + + "Please specify one or the other." + ); + } + return new MultiTermsValuesSourceConfig(fieldName, missing, script, timeZone, userValueTypeHint, format, includeExclude); + } + } +} diff --git a/server/src/main/java/org/opensearch/search/aggregations/support/MultiValuesSourceFieldConfig.java b/server/src/main/java/org/opensearch/search/aggregations/support/MultiValuesSourceFieldConfig.java index 54450763148c8..ea9bbe8019276 100644 --- a/server/src/main/java/org/opensearch/search/aggregations/support/MultiValuesSourceFieldConfig.java +++ b/server/src/main/java/org/opensearch/search/aggregations/support/MultiValuesSourceFieldConfig.java @@ -38,26 +38,17 @@ import org.opensearch.common.TriFunction; import org.opensearch.common.io.stream.StreamInput; import org.opensearch.common.io.stream.StreamOutput; -import org.opensearch.common.io.stream.Writeable; -import org.opensearch.common.time.DateUtils; import org.opensearch.common.xcontent.ObjectParser; -import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; -import org.opensearch.common.xcontent.XContentParser; import org.opensearch.index.query.AbstractQueryBuilder; import org.opensearch.index.query.QueryBuilder; import org.opensearch.script.Script; import java.io.IOException; import java.time.ZoneId; -import java.time.ZoneOffset; import java.util.Objects; -public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject { - private final String fieldName; - private final Object missing; - private final Script script; - private final ZoneId timeZone; +public class MultiValuesSourceFieldConfig extends BaseMultiValuesSourceFieldConfig { private final QueryBuilder filter; private static final String NAME = "field_config"; @@ -73,32 +64,7 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject MultiValuesSourceFieldConfig.Builder::new ); - parser.declareString(MultiValuesSourceFieldConfig.Builder::setFieldName, ParseField.CommonFields.FIELD); - parser.declareField( - MultiValuesSourceFieldConfig.Builder::setMissing, - XContentParser::objectText, - ParseField.CommonFields.MISSING, - ObjectParser.ValueType.VALUE - ); - - if (scriptable) { - parser.declareField( - MultiValuesSourceFieldConfig.Builder::setScript, - (p, context) -> Script.parse(p), - Script.SCRIPT_PARSE_FIELD, - 
ObjectParser.ValueType.OBJECT_OR_STRING - ); - } - - if (timezoneAware) { - parser.declareField(MultiValuesSourceFieldConfig.Builder::setTimeZone, p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ZoneId.of(p.text()); - } else { - return ZoneOffset.ofHours(p.intValue()); - } - }, ParseField.CommonFields.TIME_ZONE, ObjectParser.ValueType.LONG); - } + BaseMultiValuesSourceFieldConfig.PARSER.apply(parser, scriptable, timezoneAware); if (filtered) { parser.declareField( @@ -112,26 +78,12 @@ public class MultiValuesSourceFieldConfig implements Writeable, ToXContentObject }; protected MultiValuesSourceFieldConfig(String fieldName, Object missing, Script script, ZoneId timeZone, QueryBuilder filter) { - this.fieldName = fieldName; - this.missing = missing; - this.script = script; - this.timeZone = timeZone; + super(fieldName, missing, script, timeZone); this.filter = filter; } public MultiValuesSourceFieldConfig(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - this.fieldName = in.readOptionalString(); - } else { - this.fieldName = in.readString(); - } - this.missing = in.readGenericValue(); - this.script = in.readOptionalWriteable(Script::new); - if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { - this.timeZone = DateUtils.dateTimeZoneToZoneId(in.readOptionalTimeZone()); - } else { - this.timeZone = in.readOptionalZoneId(); - } + super(in); if (in.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) { this.filter = in.readOptionalNamedWriteable(QueryBuilder.class); } else { @@ -139,133 +91,43 @@ public MultiValuesSourceFieldConfig(StreamInput in) throws IOException { } } - public Object getMissing() { - return missing; - } - - public Script getScript() { - return script; - } - - public ZoneId getTimeZone() { - return timeZone; - } - - public String getFieldName() { - return fieldName; - } - public QueryBuilder getFilter() { return filter; } @Override - public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - out.writeOptionalString(fieldName); - } else { - out.writeString(fieldName); - } - out.writeGenericValue(missing); - out.writeOptionalWriteable(script); - if (out.getVersion().before(LegacyESVersion.V_7_0_0)) { - out.writeOptionalTimeZone(DateUtils.zoneIdToDateTimeZone(timeZone)); - } else { - out.writeOptionalZoneId(timeZone); - } + public void doWriteTo(StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(LegacyESVersion.V_7_8_0)) { out.writeOptionalNamedWriteable(filter); } } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (missing != null) { - builder.field(ParseField.CommonFields.MISSING.getPreferredName(), missing); - } - if (script != null) { - builder.field(Script.SCRIPT_PARSE_FIELD.getPreferredName(), script); - } - if (fieldName != null) { - builder.field(ParseField.CommonFields.FIELD.getPreferredName(), fieldName); - } - if (timeZone != null) { - builder.field(ParseField.CommonFields.TIME_ZONE.getPreferredName(), timeZone.getId()); - } + public void doXContentBody(XContentBuilder builder, Params params) throws IOException { if (filter != null) { builder.field(FILTER.getPreferredName()); filter.toXContent(builder, params); } - builder.endObject(); - return builder; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; + if (super.equals(o) == false) return 
false; + MultiValuesSourceFieldConfig that = (MultiValuesSourceFieldConfig) o; - return Objects.equals(fieldName, that.fieldName) - && Objects.equals(missing, that.missing) - && Objects.equals(script, that.script) - && Objects.equals(timeZone, that.timeZone) - && Objects.equals(filter, that.filter); + return Objects.equals(filter, that.filter); } @Override public int hashCode() { - return Objects.hash(fieldName, missing, script, timeZone, filter); + return Objects.hash(super.hashCode(), filter); } - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - private String fieldName; - private Object missing = null; - private Script script = null; - private ZoneId timeZone = null; + public static class Builder extends BaseMultiValuesSourceFieldConfig.Builder { private QueryBuilder filter = null; - public String getFieldName() { - return fieldName; - } - - public Builder setFieldName(String fieldName) { - this.fieldName = fieldName; - return this; - } - - public Object getMissing() { - return missing; - } - - public Builder setMissing(Object missing) { - this.missing = missing; - return this; - } - - public Script getScript() { - return script; - } - - public Builder setScript(Script script) { - this.script = script; - return this; - } - - public ZoneId getTimeZone() { - return timeZone; - } - - public Builder setTimeZone(ZoneId timeZone) { - this.timeZone = timeZone; - return this; - } - public Builder setFilter(QueryBuilder filter) { this.filter = filter; return this; diff --git a/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java index fe029d22a45b2..421865013a28c 100644 --- a/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/opensearch/search/aggregations/AggregationsTests.java @@ -64,6 +64,7 @@ import org.opensearch.search.aggregations.bucket.range.InternalRangeTests; import org.opensearch.search.aggregations.bucket.sampler.InternalSamplerTests; import org.opensearch.search.aggregations.bucket.terms.DoubleTermsTests; +import org.opensearch.search.aggregations.bucket.terms.InternalMultiTermsTests; import org.opensearch.search.aggregations.bucket.terms.LongRareTermsTests; import org.opensearch.search.aggregations.bucket.terms.LongTermsTests; import org.opensearch.search.aggregations.bucket.terms.SignificantLongTermsTests; @@ -172,6 +173,7 @@ private static List> getAggsTests() { aggsTests.add(new InternalTopHitsTests()); aggsTests.add(new InternalCompositeTests()); aggsTests.add(new InternalMedianAbsoluteDeviationTests()); + aggsTests.add(new InternalMultiTermsTests()); return Collections.unmodifiableList(aggsTests); } diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTermsTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTermsTests.java new file mode 100644 index 0000000000000..2657f2bdd5138 --- /dev/null +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/InternalMultiTermsTests.java @@ -0,0 +1,116 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.search.aggregations.bucket.terms;
+
+import org.apache.lucene.document.InetAddressPoint;
+import org.apache.lucene.util.BytesRef;
+import org.opensearch.search.DocValueFormat;
+import org.opensearch.search.aggregations.BucketOrder;
+import org.opensearch.search.aggregations.InternalAggregations;
+import org.opensearch.search.aggregations.ParsedMultiBucketAggregation;
+import org.opensearch.search.aggregations.support.CoreValuesSourceType;
+import org.opensearch.search.aggregations.support.ValuesSourceType;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import static java.util.Arrays.asList;
+
+public class InternalMultiTermsTests extends InternalTermsTestCase {
+
+    /**
+     * The terms count and types should be consistent across the entire test.
+     */
+    private final List<ValuesSourceType> types = getSupportedValuesSourceTypes();
+
+    @Override
+    protected InternalTerms<?, ?> createTestInstance(
+        String name,
+        Map<String, Object> metadata,
+        InternalAggregations aggregations,
+        boolean showTermDocCountError,
+        long docCountError
+    ) {
+        BucketOrder order = BucketOrder.count(false);
+        long minDocCount = 1;
+        int requiredSize = 3;
+        int shardSize = requiredSize + 2;
+        long otherDocCount = 0;
+
+        final int numBuckets = randomNumberOfBuckets();
+
+        List<InternalMultiTerms.Bucket> buckets = new ArrayList<>();
+        List<DocValueFormat> formats = types.stream().map(type -> type.getFormatter(null, null)).collect(Collectors.toList());
+
+        for (int i = 0; i < numBuckets; i++) {
+            buckets.add(
+                new InternalMultiTerms.Bucket(
+                    types.stream().map(this::value).collect(Collectors.toList()),
+                    minDocCount,
+                    aggregations,
+                    showTermDocCountError,
+                    docCountError,
+                    formats
+                )
+            );
+        }
+        BucketOrder reduceOrder = rarely() ? order : BucketOrder.key(true);
+        // mimic the per-shard bucket sort operation, which is required by the bucket reduce phase.
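+        // (the reduce phase merges shard responses on the assumption that each
+        // shard's buckets already arrive sorted by reduceOrder)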
+ Collections.sort(buckets, reduceOrder.comparator()); + return new InternalMultiTerms( + name, + reduceOrder, + order, + requiredSize, + minDocCount, + metadata, + shardSize, + showTermDocCountError, + otherDocCount, + docCountError, + formats, + buckets + ); + } + + @Override + protected Class implementationClass() { + return ParsedMultiTerms.class; + } + + private static List getSupportedValuesSourceTypes() { + return Collections.unmodifiableList( + asList( + CoreValuesSourceType.NUMERIC, + CoreValuesSourceType.BYTES, + CoreValuesSourceType.IP, + CoreValuesSourceType.DATE, + CoreValuesSourceType.BOOLEAN + ) + ); + } + + private Object value(ValuesSourceType type) { + if (CoreValuesSourceType.NUMERIC.equals(type)) { + return randomInt(); + } else if (CoreValuesSourceType.DATE.equals(type)) { + return randomNonNegativeLong(); + } else if (CoreValuesSourceType.BOOLEAN.equals(type)) { + return randomBoolean(); + } else if (CoreValuesSourceType.BYTES.equals(type)) { + return new BytesRef(randomAlphaOfLength(10)); + } else if (CoreValuesSourceType.IP.equals(type)) { + return new BytesRef(InetAddressPoint.encode(randomIp(randomBoolean()))); + } + throw new IllegalArgumentException("unexpected type [" + type.typeName() + "]"); + } +} diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilderTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilderTests.java new file mode 100644 index 0000000000000..505fb7382ab3b --- /dev/null +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregationBuilderTests.java @@ -0,0 +1,182 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.search.aggregations.bucket.terms;
+
+import org.opensearch.search.DocValueFormat;
+import org.opensearch.search.aggregations.Aggregator;
+import org.opensearch.search.aggregations.BaseAggregationTestCase;
+import org.opensearch.search.aggregations.BucketOrder;
+import org.opensearch.search.aggregations.support.MultiTermsValuesSourceConfig;
+import org.opensearch.search.aggregations.support.ValueType;
+
+import java.time.ZoneId;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.function.Supplier;
+
+public class MultiTermsAggregationBuilderTests extends BaseAggregationTestCase<MultiTermsAggregationBuilder> {
+
+    @Override
+    protected MultiTermsAggregationBuilder createTestAggregatorBuilder() {
+        String name = randomAlphaOfLengthBetween(3, 20);
+        MultiTermsAggregationBuilder factory = new MultiTermsAggregationBuilder(name);
+
+        int termsCount = randomIntBetween(2, 10);
+        List<MultiTermsValuesSourceConfig> fieldConfigs = new ArrayList<>();
+        for (int i = 0; i < termsCount; i++) {
+            fieldConfigs.add(randomFieldConfig());
+        }
+        factory.terms(fieldConfigs);
+
+        if (randomBoolean()) {
+            factory.size(randomIntBetween(1, Integer.MAX_VALUE));
+        }
+        if (randomBoolean()) {
+            factory.shardSize(randomIntBetween(1, Integer.MAX_VALUE));
+        }
+        if (randomBoolean()) {
+            int minDocCount = randomInt(4);
+            switch (minDocCount) {
+                case 0:
+                    break;
+                case 1:
+                case 2:
+                case 3:
+                case 4:
+                    minDocCount = randomIntBetween(0, Integer.MAX_VALUE);
+                    break;
+                default:
+                    fail();
+            }
+            factory.minDocCount(minDocCount);
+        }
+        if (randomBoolean()) {
+            int shardMinDocCount = randomInt(4);
+            switch (shardMinDocCount) {
+                case 0:
+                    break;
+                case 1:
+                case 2:
+                case 3:
+                case 4:
+                    shardMinDocCount = randomIntBetween(0, Integer.MAX_VALUE);
+                    break;
+                default:
+                    fail();
+            }
+            factory.shardMinDocCount(shardMinDocCount);
+        }
+        if (randomBoolean()) {
+            factory.collectMode(randomFrom(Aggregator.SubAggCollectionMode.values()));
+        }
+        if (randomBoolean()) {
+            List<BucketOrder> order = randomOrder();
+            if (order.size() == 1 && randomBoolean()) {
+                factory.order(order.get(0));
+            } else {
+                factory.order(order);
+            }
+        }
+        if (randomBoolean()) {
+            factory.showTermDocCountError(randomBoolean());
+        }
+        return factory;
+    }
+
+    public void testInvalidTermsParams() {
+        IllegalArgumentException exception = expectThrows(
+            IllegalArgumentException.class,
+            () -> { new MultiTermsAggregationBuilder("_name").terms(Collections.singletonList(randomFieldConfig())); }
+        );
+        assertEquals(
+            "multi term aggregation must have at least 2 terms. Found [1] in [_name]. Use terms aggregation for single term aggregation",
+            exception.getMessage()
+        );
+
+        exception = expectThrows(
+            IllegalArgumentException.class,
+            () -> { new MultiTermsAggregationBuilder("_name").terms(Collections.emptyList()); }
+        );
+        assertEquals("multi term aggregation must have at least 2 terms. Found [0] in [_name]", exception.getMessage());
+
+        exception = expectThrows(IllegalArgumentException.class, () -> { new MultiTermsAggregationBuilder("_name").terms(null); });
+        assertEquals("[terms] must not be null. 
Found null terms in [_name]", exception.getMessage()); + } + + private List randomOrder() { + List orders = new ArrayList<>(); + switch (randomInt(4)) { + case 0: + orders.add(BucketOrder.key(randomBoolean())); + break; + case 1: + orders.add(BucketOrder.count(randomBoolean())); + break; + case 2: + orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean())); + break; + case 3: + orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20), randomBoolean())); + break; + case 4: + int numOrders = randomIntBetween(1, 3); + for (int i = 0; i < numOrders; i++) { + orders.addAll(randomOrder()); + } + break; + default: + fail(); + } + return orders; + } + + protected static MultiTermsValuesSourceConfig randomFieldConfig() { + String field = randomAlphaOfLength(10); + Object missing = randomBoolean() ? randomAlphaOfLength(10) : null; + ZoneId timeZone = randomBoolean() ? randomZone() : null; + ValueType userValueTypeHint = randomBoolean() + ? randomFrom(ValueType.STRING, ValueType.LONG, ValueType.DOUBLE, ValueType.DATE, ValueType.IP) + : null; + String format = randomBoolean() ? randomNumericDocValueFormat().toString() : null; + return randomFieldOrScript( + new MultiTermsValuesSourceConfig.Builder().setMissing(missing) + .setTimeZone(timeZone) + .setUserValueTypeHint(userValueTypeHint) + .setFormat(format), + field + ).build(); + } + + protected static MultiTermsValuesSourceConfig.Builder randomFieldOrScript(MultiTermsValuesSourceConfig.Builder builder, String field) { + int choice = randomInt(1); + switch (choice) { + case 0: + builder.setFieldName(field); + break; + case 1: + builder.setScript(mockScript("doc[" + field + "] + 1")); + break; + default: + throw new AssertionError("Unknown random operation [" + choice + "]"); + } + return builder; + } + + /** + * @return a random {@link DocValueFormat} that can be used in aggregations which + * compute numbers. + */ + protected static DocValueFormat randomNumericDocValueFormat() { + final List> formats = new ArrayList<>(3); + formats.add(() -> DocValueFormat.RAW); + formats.add(() -> new DocValueFormat.Decimal(randomFrom("###.##", "###,###.##"))); + return randomFrom(formats).get(); + } +} diff --git a/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java new file mode 100644 index 0000000000000..f3922a65ff264 --- /dev/null +++ b/server/src/test/java/org/opensearch/search/aggregations/bucket/terms/MultiTermsAggregatorTests.java @@ -0,0 +1,909 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */ + +package org.opensearch.search.aggregations.bucket.terms; + +import org.apache.lucene.document.DoubleDocValuesField; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FloatDocValuesField; +import org.apache.lucene.document.InetAddressPoint; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.document.StringField; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.util.BytesRef; +import org.hamcrest.MatcherAssert; +import org.opensearch.common.CheckedConsumer; +import org.opensearch.common.network.InetAddresses; +import org.opensearch.common.settings.Settings; +import org.opensearch.common.time.DateFormatter; +import org.opensearch.index.mapper.BooleanFieldMapper; +import org.opensearch.index.mapper.DateFieldMapper; +import org.opensearch.index.mapper.GeoPointFieldMapper; +import org.opensearch.index.mapper.IpFieldMapper; +import org.opensearch.index.mapper.KeywordFieldMapper; +import org.opensearch.index.mapper.MappedFieldType; +import org.opensearch.index.mapper.NumberFieldMapper; +import org.opensearch.script.MockScriptEngine; +import org.opensearch.script.Script; +import org.opensearch.script.ScriptEngine; +import org.opensearch.script.ScriptModule; +import org.opensearch.script.ScriptService; +import org.opensearch.script.ScriptType; +import org.opensearch.search.aggregations.AggregationBuilder; +import org.opensearch.search.aggregations.AggregatorTestCase; +import org.opensearch.search.aggregations.BucketOrder; +import org.opensearch.search.aggregations.metrics.InternalMax; +import org.opensearch.search.aggregations.metrics.MaxAggregationBuilder; +import org.opensearch.search.aggregations.support.CoreValuesSourceType; +import org.opensearch.search.aggregations.support.MultiTermsValuesSourceConfig; +import org.opensearch.search.aggregations.support.ValueType; +import org.opensearch.search.aggregations.support.ValuesSourceType; +import org.opensearch.search.lookup.LeafDocLookup; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Function; + +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static java.util.stream.Collectors.toList; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; + +public class MultiTermsAggregatorTests extends AggregatorTestCase { + private static final String FIELD_NAME = "field"; + private static final String VALUE_SCRIPT_NAME = "value_script"; + private static final String FIELD_SCRIPT_NAME = "field_script"; + + private static final String AGG_NAME = "_name"; + + private static final String INT_FIELD = "int"; + private static final String LONG_FIELD = "long"; + private static final String FLOAT_FIELD = "float"; + private static final String DOUBLE_FIELD = "double"; + 
private static final String KEYWORD_FIELD = "keyword"; + private static final String DATE_FIELD = "date"; + private static final String IP_FIELD = "ip"; + private static final String GEO_POINT_FIELD = "geopoint"; + private static final String BOOL_FIELD = "bool"; + private static final String UNRELATED_KEYWORD_FIELD = "unrelated"; + + private static final Map mappedFieldTypeMap = new HashMap() { + { + put(INT_FIELD, new NumberFieldMapper.NumberFieldType(INT_FIELD, NumberFieldMapper.NumberType.INTEGER)); + put(LONG_FIELD, new NumberFieldMapper.NumberFieldType(LONG_FIELD, NumberFieldMapper.NumberType.LONG)); + put(FLOAT_FIELD, new NumberFieldMapper.NumberFieldType(FLOAT_FIELD, NumberFieldMapper.NumberType.FLOAT)); + put(DOUBLE_FIELD, new NumberFieldMapper.NumberFieldType(DOUBLE_FIELD, NumberFieldMapper.NumberType.DOUBLE)); + put(DATE_FIELD, dateFieldType(DATE_FIELD)); + put(KEYWORD_FIELD, new KeywordFieldMapper.KeywordFieldType(KEYWORD_FIELD)); + put(IP_FIELD, new IpFieldMapper.IpFieldType(IP_FIELD)); + put(FIELD_NAME, new NumberFieldMapper.NumberFieldType(FIELD_NAME, NumberFieldMapper.NumberType.INTEGER)); + put(UNRELATED_KEYWORD_FIELD, new KeywordFieldMapper.KeywordFieldType(UNRELATED_KEYWORD_FIELD)); + put(GEO_POINT_FIELD, new GeoPointFieldMapper.GeoPointFieldType(GEO_POINT_FIELD)); + put(BOOL_FIELD, new BooleanFieldMapper.BooleanFieldType(BOOL_FIELD)); + } + }; + + private static final Consumer NONE_DECORATOR = null; + + @Override + protected List getSupportedValuesSourceTypes() { + return Collections.unmodifiableList( + asList( + CoreValuesSourceType.NUMERIC, + CoreValuesSourceType.BYTES, + CoreValuesSourceType.IP, + CoreValuesSourceType.DATE, + CoreValuesSourceType.BOOLEAN + ) + ); + } + + @Override + protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) { + return createTestAggregatorBuilder(asList(term(fieldName), term(fieldName))); + } + + @Override + protected ScriptService getMockScriptService() { + final Map, Object>> scripts = org.opensearch.common.collect.Map.of( + VALUE_SCRIPT_NAME, + vars -> ((Number) vars.get("_value")).doubleValue() + 1, + FIELD_SCRIPT_NAME, + vars -> { + final String fieldName = (String) vars.get(FIELD_NAME); + final LeafDocLookup lookup = (LeafDocLookup) vars.get("doc"); + return lookup.get(fieldName).stream().map(value -> ((Number) value).longValue() + 1).collect(toList()); + } + ); + final MockScriptEngine engine = new MockScriptEngine(MockScriptEngine.NAME, scripts, emptyMap()); + final Map engines = singletonMap(engine.getType(), engine); + return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS); + } + + public void testNumbers() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + fieldConfigs(asList(INT_FIELD, LONG_FIELD, FLOAT_FIELD, DOUBLE_FIELD)), + NONE_DECORATOR, + iw -> { + iw.addDocument( + asList( + new NumericDocValuesField(INT_FIELD, 1), + new SortedNumericDocValuesField(LONG_FIELD, 1L), + new FloatDocValuesField(FLOAT_FIELD, 1.0f), + new DoubleDocValuesField(DOUBLE_FIELD, 1.0d) + ) + ); + iw.addDocument( + asList( + new NumericDocValuesField(INT_FIELD, 1), + new SortedNumericDocValuesField(LONG_FIELD, 1L), + new FloatDocValuesField(FLOAT_FIELD, 1.0f), + new DoubleDocValuesField(DOUBLE_FIELD, 1.0d) + ) + ); + iw.addDocument( + asList( + new NumericDocValuesField(INT_FIELD, 2), + new SortedNumericDocValuesField(LONG_FIELD, 2L), + new FloatDocValuesField(FLOAT_FIELD, 2.0f), + new DoubleDocValuesField(DOUBLE_FIELD, 2.0d) + ) + ); + iw.addDocument( + asList( 
+ new NumericDocValuesField(INT_FIELD, 2), + new SortedNumericDocValuesField(LONG_FIELD, 2L), + new FloatDocValuesField(FLOAT_FIELD, 3.0f), + new DoubleDocValuesField(DOUBLE_FIELD, 3.0d) + ) + ); + iw.addDocument( + asList( + new NumericDocValuesField(INT_FIELD, 2), + new SortedNumericDocValuesField(LONG_FIELD, 2L), + new FloatDocValuesField(FLOAT_FIELD, 3.0f), + new DoubleDocValuesField(DOUBLE_FIELD, 3.0d) + ) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo(1L), equalTo(1L), equalTo(1.0), equalTo(1.0))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo(2L), equalTo(2L), equalTo(3.0), equalTo(3.0))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo(2L), equalTo(2L), equalTo(2.0), equalTo(2.0))); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + } + ); + } + + public void testMixNumberAndKeywordWithFilter() throws IOException { + testAggregation( + new TermQuery(new Term(KEYWORD_FIELD, "a")), + fieldConfigs(asList(KEYWORD_FIELD, FLOAT_FIELD)), + NONE_DECORATOR, + iw -> { + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new StringField(KEYWORD_FIELD, "a", Field.Store.NO), + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new StringField(KEYWORD_FIELD, "a", Field.Store.NO), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new StringField(KEYWORD_FIELD, "b", Field.Store.NO), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new StringField(KEYWORD_FIELD, "a", Field.Store.NO), + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(2)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(2.0))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo(1.0))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + } + ); + } + + public void testMixNumberAndKeyword() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, INT_FIELD, FLOAT_FIELD)), NONE_DECORATOR, iw -> { + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), + new NumericDocValuesField(INT_FIELD, 2), + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), 
hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L), equalTo(1.0))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("b"), equalTo(1L), equalTo(2.0))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("c"), equalTo(2L), equalTo(2.0))); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + }); + } + + public void testMultiValuesField() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, INT_FIELD)), NONE_DECORATOR, iw -> { + iw.addDocument( + asList( + new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef("b")), + new SortedNumericDocValuesField(INT_FIELD, 1) + ) + ); + iw.addDocument( + asList( + new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new SortedNumericDocValuesField(INT_FIELD, 1), + new SortedNumericDocValuesField(INT_FIELD, 3) + ) + ); + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo(3L))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("b"), equalTo(1L))); + }); + + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, INT_FIELD)), NONE_DECORATOR, iw -> { + iw.addDocument( + asList( + new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef("b")), + new SortedNumericDocValuesField(INT_FIELD, 1), + new SortedNumericDocValuesField(INT_FIELD, 2) + ) + ); + iw.addDocument( + asList( + new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new SortedSetDocValuesField(KEYWORD_FIELD, new BytesRef("c")), + new SortedNumericDocValuesField(INT_FIELD, 1), + new SortedNumericDocValuesField(INT_FIELD, 3) + ) + ); + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(7)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo(2L))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("a"), equalTo(3L))); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(3).getKey(), contains(equalTo("b"), equalTo(1L))); + MatcherAssert.assertThat(h.getBuckets().get(3).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(4).getKey(), contains(equalTo("b"), equalTo(2L))); + MatcherAssert.assertThat(h.getBuckets().get(4).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(5).getKey(), contains(equalTo("c"), equalTo(1L))); + MatcherAssert.assertThat(h.getBuckets().get(5).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(6).getKey(), contains(equalTo("c"), 
equalTo(3L))); + MatcherAssert.assertThat(h.getBuckets().get(6).getDocCount(), equalTo(1L)); + }); + } + + public void testScripts() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName(KEYWORD_FIELD).build(), + new MultiTermsValuesSourceConfig.Builder().setScript( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap(FIELD_NAME, FIELD_NAME)) + ).setUserValueTypeHint(ValueType.LONG).build() + ), + null, + iw -> { + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(FIELD_NAME, 1)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(FIELD_NAME, 2)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(FIELD_NAME, 2)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), new NumericDocValuesField(FIELD_NAME, 3)) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("b"), equalTo(3L))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo(2L))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("c"), equalTo(4L))); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + } + ); + } + + public void testScriptsWithoutValueTypeHint() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName(KEYWORD_FIELD).build(), + new MultiTermsValuesSourceConfig.Builder().setScript( + new Script(ScriptType.INLINE, MockScriptEngine.NAME, FIELD_SCRIPT_NAME, singletonMap(FIELD_NAME, FIELD_NAME)) + ).build() + ), + null, + iw -> { + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(FIELD_NAME, 1)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(FIELD_NAME, 2)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(FIELD_NAME, 2)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), new NumericDocValuesField(FIELD_NAME, 3)) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("b"), equalTo("3"))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo("2"))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("c"), equalTo("4"))); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + } + ); + } + + public void testValueScripts() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName(KEYWORD_FIELD).build(), + new MultiTermsValuesSourceConfig.Builder().setFieldName(FIELD_NAME) + .setScript(new Script(ScriptType.INLINE, 
MockScriptEngine.NAME, VALUE_SCRIPT_NAME, emptyMap())) + .build() + ), + null, + iw -> { + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(FIELD_NAME, 1)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(FIELD_NAME, 2)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(FIELD_NAME, 2)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), new NumericDocValuesField(FIELD_NAME, 3)) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("b"), equalTo(3.0))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo(2.0))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("c"), equalTo(4.0))); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + } + ); + } + + public void testOrderByMetrics() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, INT_FIELD)), b -> { + b.order(BucketOrder.aggregation("max", false)); + b.subAggregation(new MaxAggregationBuilder("max").field(FLOAT_FIELD)); + }, iw -> { + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), + new NumericDocValuesField(INT_FIELD, 2), + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), + new NumericDocValuesField(INT_FIELD, 3), + new FloatDocValuesField(FLOAT_FIELD, 3.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 4.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), + new NumericDocValuesField(INT_FIELD, 2), + new FloatDocValuesField(FLOAT_FIELD, 3.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), + new NumericDocValuesField(INT_FIELD, 3), + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(((InternalMax) (h.getBuckets().get(0).getAggregations().get("max"))).value(), closeTo(4.0f, 0.01)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("b"), equalTo(2L))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(((InternalMax) (h.getBuckets().get(1).getAggregations().get("max"))).value(), closeTo(3.0f, 0.01)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("c"), equalTo(3L))); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(((InternalMax) 
(h.getBuckets().get(2).getAggregations().get("max"))).value(), closeTo(3.0f, 0.01)); + }); + } + + public void testNumberFieldFormat() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + asList(term(KEYWORD_FIELD), new MultiTermsValuesSourceConfig.Builder().setFieldName(DOUBLE_FIELD).setFormat("00.00").build()), + null, + iw -> { + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new DoubleDocValuesField(DOUBLE_FIELD, 1.0d)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new DoubleDocValuesField(DOUBLE_FIELD, 2.0d)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new DoubleDocValuesField(DOUBLE_FIELD, 2.0d)) + ); + iw.addDocument( + asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new DoubleDocValuesField(DOUBLE_FIELD, 1.0d)) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKeyAsString(), equalTo("a|01.00")); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKeyAsString(), equalTo("a|02.00")); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKeyAsString(), equalTo("b|02.00")); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + } + ); + } + + public void testDates() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + asList(new MultiTermsValuesSourceConfig.Builder().setFieldName(DATE_FIELD).build(), term(KEYWORD_FIELD)), + null, + iw -> { + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-23")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-23")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-22")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-23")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-21")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")) + ) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(4)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKeyAsString(), equalTo("2022-03-23|a")); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKeyAsString(), equalTo("2022-03-21|c")); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKeyAsString(), equalTo("2022-03-22|a")); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(3).getKeyAsString(), equalTo("2022-03-23|b")); + MatcherAssert.assertThat(h.getBuckets().get(3).getDocCount(), equalTo(1L)); + } + ); + } + + public void testDatesFieldFormat() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + asList( + new 
MultiTermsValuesSourceConfig.Builder().setFieldName(DATE_FIELD).setFormat("yyyy/MM/dd").build(), + term(KEYWORD_FIELD) + ), + null, + iw -> { + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-23")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-23")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-22")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-23")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")) + ) + ); + iw.addDocument( + asList( + new SortedNumericDocValuesField(DATE_FIELD, dateFieldType(DATE_FIELD).parse("2022-03-21")), + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")) + ) + ); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(4)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKeyAsString(), equalTo("2022/03/23|a")); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKeyAsString(), equalTo("2022/03/21|c")); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKeyAsString(), equalTo("2022/03/22|a")); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(3).getKeyAsString(), equalTo("2022/03/23|b")); + MatcherAssert.assertThat(h.getBuckets().get(3).getDocCount(), equalTo(1L)); + } + ); + } + + public void testIpAndKeyword() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, IP_FIELD)), NONE_DECORATOR, iw -> { + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new SortedDocValuesField(IP_FIELD, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.0")))) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), + new SortedDocValuesField(IP_FIELD, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1")))) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), + new SortedDocValuesField(IP_FIELD, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.2")))) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new SortedDocValuesField(IP_FIELD, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.0")))) + ) + ); + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(3)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo("192.168.0.0"))); + MatcherAssert.assertThat(h.getBuckets().get(0).getKeyAsString(), equalTo("a|192.168.0.0")); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("b"), equalTo("192.168.0.1"))); + MatcherAssert.assertThat(h.getBuckets().get(1).getKeyAsString(), equalTo("b|192.168.0.1")); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + 
MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("c"), equalTo("192.168.0.2"))); + MatcherAssert.assertThat(h.getBuckets().get(2).getKeyAsString(), equalTo("c|192.168.0.2")); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + }); + } + + public void testEmpty() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, INT_FIELD)), NONE_DECORATOR, iw -> {}, h -> { + MatcherAssert.assertThat(h.getName(), equalTo(AGG_NAME)); + MatcherAssert.assertThat(h.getBuckets(), hasSize(0)); + }); + } + + public void testNull() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, INT_FIELD, FLOAT_FIELD)), NONE_DECORATOR, iw -> { + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument(asList(new NumericDocValuesField(INT_FIELD, 1), new FloatDocValuesField(FLOAT_FIELD, 2.0f))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), new FloatDocValuesField(FLOAT_FIELD, 2.0f))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("d")), new NumericDocValuesField(INT_FIELD, 3))); + + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(1)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L), equalTo(1.0))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + }); + + } + + public void testMissing() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName(KEYWORD_FIELD).setMissing("a").build(), + new MultiTermsValuesSourceConfig.Builder().setFieldName(INT_FIELD).setMissing(1).build(), + new MultiTermsValuesSourceConfig.Builder().setFieldName(FLOAT_FIELD).setMissing(2.0f).build() + ), + NONE_DECORATOR, + iw -> { + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + iw.addDocument( + asList( + // missing KEYWORD_FIELD + new NumericDocValuesField(INT_FIELD, 1), + new FloatDocValuesField(FLOAT_FIELD, 1.0f) + ) + ); + iw.addDocument( + asList( + new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), + // missing INT_FIELD + new FloatDocValuesField(FLOAT_FIELD, 2.0f) + ) + ); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), new NumericDocValuesField(INT_FIELD, 2) + // missing FLOAT_FIELD + )); + iw.addDocument(singletonList(new SortedDocValuesField(UNRELATED_KEYWORD_FIELD, new BytesRef("unrelated")))); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(4)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L), equalTo(2.0))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo(1L), equalTo(1.0))); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("b"), equalTo(1L), equalTo(2.0))); + 
MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(3).getKey(), contains(equalTo("c"), equalTo(2L), equalTo(2.0))); + MatcherAssert.assertThat(h.getBuckets().get(3).getDocCount(), equalTo(1L)); + } + ); + } + + public void testMixKeywordAndBoolean() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, BOOL_FIELD)), NONE_DECORATOR, iw -> { + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(BOOL_FIELD, 1))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(BOOL_FIELD, 0))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(BOOL_FIELD, 0))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(BOOL_FIELD, 1))); + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(4)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(false))); + MatcherAssert.assertThat(h.getBuckets().get(0).getKeyAsString(), equalTo("a|false")); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(1).getKey(), contains(equalTo("a"), equalTo(true))); + MatcherAssert.assertThat(h.getBuckets().get(1).getKeyAsString(), equalTo("a|true")); + MatcherAssert.assertThat(h.getBuckets().get(1).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(2).getKey(), contains(equalTo("b"), equalTo(false))); + MatcherAssert.assertThat(h.getBuckets().get(2).getKeyAsString(), equalTo("b|false")); + MatcherAssert.assertThat(h.getBuckets().get(2).getDocCount(), equalTo(1L)); + MatcherAssert.assertThat(h.getBuckets().get(3).getKey(), contains(equalTo("b"), equalTo(true))); + MatcherAssert.assertThat(h.getBuckets().get(3).getKeyAsString(), equalTo("b|true")); + MatcherAssert.assertThat(h.getBuckets().get(3).getDocCount(), equalTo(1L)); + }); + } + + public void testGeoPointField() { + assertThrows( + IllegalArgumentException.class, + () -> testAggregation( + new MatchAllDocsQuery(), + asList(term(KEYWORD_FIELD), term(GEO_POINT_FIELD)), + NONE_DECORATOR, + iw -> {}, + f -> fail("should throw exception") + ) + ); + } + + public void testMinDocCount() throws IOException { + testAggregation(new MatchAllDocsQuery(), fieldConfigs(asList(KEYWORD_FIELD, INT_FIELD)), b -> b.minDocCount(2), iw -> { + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(INT_FIELD, 1))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(INT_FIELD, 1))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(INT_FIELD, 2))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(INT_FIELD, 1))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), new NumericDocValuesField(INT_FIELD, 2))); + }, h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(1)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + }); + } + + public void testIncludeExclude() throws IOException { + testAggregation( + new MatchAllDocsQuery(), + 
asList( + new MultiTermsValuesSourceConfig.Builder().setFieldName(KEYWORD_FIELD) + .setIncludeExclude(new IncludeExclude("a", null)) + .build(), + term(INT_FIELD) + ), + NONE_DECORATOR, + iw -> { + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(INT_FIELD, 1))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("a")), new NumericDocValuesField(INT_FIELD, 1))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("b")), new NumericDocValuesField(INT_FIELD, 1))); + iw.addDocument(asList(new SortedDocValuesField(KEYWORD_FIELD, new BytesRef("c")), new NumericDocValuesField(INT_FIELD, 2))); + }, + h -> { + MatcherAssert.assertThat(h.getBuckets(), hasSize(1)); + MatcherAssert.assertThat(h.getBuckets().get(0).getKey(), contains(equalTo("a"), equalTo(1L))); + MatcherAssert.assertThat(h.getBuckets().get(0).getDocCount(), equalTo(2L)); + } + ); + } + + private void testAggregation( + Query query, + List terms, + Consumer decorator, + CheckedConsumer indexBuilder, + Consumer verify + ) throws IOException { + MultiTermsAggregationBuilder builder = createTestAggregatorBuilder(terms); + if (decorator != NONE_DECORATOR) { + decorator.accept(builder); + } + testCase(builder, query, indexBuilder, verify, mappedFieldTypeMap.values().toArray(new MappedFieldType[] {})); + } + + private MultiTermsValuesSourceConfig term(String field) { + return new MultiTermsValuesSourceConfig.Builder().setFieldName(field).build(); + } + + private MultiTermsAggregationBuilder createTestAggregatorBuilder(List termsConfig) { + MultiTermsAggregationBuilder factory = new MultiTermsAggregationBuilder(AGG_NAME); + factory.terms(termsConfig); + + if (randomBoolean()) { + factory.size(randomIntBetween(10, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + factory.shardSize(randomIntBetween(10, Integer.MAX_VALUE)); + } + if (randomBoolean()) { + factory.showTermDocCountError(randomBoolean()); + } + return factory; + } + + private List fieldConfigs(List terms) { + List termConfigs = new ArrayList<>(); + for (String term : terms) { + termConfigs.add(term(term)); + } + return termConfigs; + } + + private static DateFieldMapper.DateFieldType dateFieldType(String name) { + return new DateFieldMapper.DateFieldType( + name, + true, + false, + true, + DateFormatter.forPattern("date"), + DateFieldMapper.Resolution.MILLISECONDS, + null, + Collections.emptyMap() + ); + } +} diff --git a/server/src/test/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfigTests.java b/server/src/test/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfigTests.java new file mode 100644 index 0000000000000..a142faa2048ea --- /dev/null +++ b/server/src/test/java/org/opensearch/search/aggregations/support/MultiTermsValuesSourceConfigTests.java @@ -0,0 +1,65 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.search.aggregations.support;
+
+import org.opensearch.common.io.stream.NamedWriteableRegistry;
+import org.opensearch.common.io.stream.Writeable;
+import org.opensearch.common.settings.Settings;
+import org.opensearch.common.xcontent.NamedXContentRegistry;
+import org.opensearch.common.xcontent.XContentParser;
+import org.opensearch.script.Script;
+import org.opensearch.search.SearchModule;
+import org.opensearch.test.AbstractSerializingTestCase;
+
+import java.io.IOException;
+import java.time.ZoneId;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class MultiTermsValuesSourceConfigTests extends AbstractSerializingTestCase<MultiTermsValuesSourceConfig> {
+
+    @Override
+    protected MultiTermsValuesSourceConfig doParseInstance(XContentParser parser) throws IOException {
+        return MultiTermsValuesSourceConfig.PARSER.apply(true, true, true, true).apply(parser, null).build();
+    }
+
+    @Override
+    protected MultiTermsValuesSourceConfig createTestInstance() {
+        String field = randomAlphaOfLength(10);
+        Object missing = randomBoolean() ? randomAlphaOfLength(10) : null;
+        ZoneId timeZone = randomBoolean() ? randomZone() : null;
+        Script script = randomBoolean() ? new Script(randomAlphaOfLength(10)) : null;
+        return new MultiTermsValuesSourceConfig.Builder().setFieldName(field)
+            .setMissing(missing)
+            .setScript(script)
+            .setTimeZone(timeZone)
+            .build();
+    }
+
+    @Override
+    protected Writeable.Reader<MultiTermsValuesSourceConfig> instanceReader() {
+        return MultiTermsValuesSourceConfig::new;
+    }
+
+    public void testMissingFieldScript() {
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new MultiTermsValuesSourceConfig.Builder().build());
+        assertThat(e.getMessage(), equalTo("[field] and [script] cannot both be null. Please specify one or the other."));
+    }
+
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedWriteables());
+    }
+
+    @Override
+    protected NamedXContentRegistry xContentRegistry() {
+        return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, Collections.emptyList()).getNamedXContents());
+    }
+}
diff --git a/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java
index 6be7abffb9ad6..f138de152a488 100644
--- a/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/InternalAggregationTestCase.java
@@ -101,9 +101,11 @@
 import org.opensearch.search.aggregations.bucket.terms.DoubleTerms;
 import org.opensearch.search.aggregations.bucket.terms.LongRareTerms;
 import org.opensearch.search.aggregations.bucket.terms.LongTerms;
+import org.opensearch.search.aggregations.bucket.terms.MultiTermsAggregationBuilder;
 import org.opensearch.search.aggregations.bucket.terms.ParsedDoubleTerms;
 import org.opensearch.search.aggregations.bucket.terms.ParsedLongRareTerms;
 import org.opensearch.search.aggregations.bucket.terms.ParsedLongTerms;
+import org.opensearch.search.aggregations.bucket.terms.ParsedMultiTerms;
 import org.opensearch.search.aggregations.bucket.terms.ParsedSignificantLongTerms;
 import org.opensearch.search.aggregations.bucket.terms.ParsedSignificantStringTerms;
 import org.opensearch.search.aggregations.bucket.terms.ParsedStringRareTerms;
@@ -289,6 +291,7 @@
         map.put(IpRangeAggregationBuilder.NAME, (p, c) -> ParsedBinaryRange.fromXContent(p, (String) c));
         map.put(TopHitsAggregationBuilder.NAME, (p, c) -> ParsedTopHits.fromXContent(p, (String) c));
         map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c));
+        map.put(MultiTermsAggregationBuilder.NAME, (p, c) -> ParsedMultiTerms.fromXContent(p, (String) c));
         namedXContents = map.entrySet()
             .stream()

From cf78065481ded1d0a8a7777779dced20399848cb Mon Sep 17 00:00:00 2001
From: Nick Knize
Date: Thu, 21 Apr 2022 16:16:57 -0500
Subject: [PATCH 38/41] [Remove] MainResponse version override cluster setting
 (#3031)

OpenSearch 2.0.0 no longer needs HLRC compatibility with legacy clients.
This commit removes all logic to spoof the version as a legacy cluster.
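For reference, the removed escape hatch was the dynamic cluster setting
"compatibility.override_main_response_version". A cluster that still relied
on it would have enabled it with a request like the one below; the shape is
reconstructed from the HLRC test fixture deleted in this commit, so treat it
as illustrative:

    PUT /_cluster/settings
    {"persistent": {"compatibility": {"override_main_response_version": true}}}

With the setting gone, the main action always reports the node's real
qualified version from Build.CURRENT.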
Signed-off-by: Nicholas Walter Knize --- .../org/opensearch/client/PingAndInfoIT.java | 22 ---------- .../opensearch/action/main/MainResponse.java | 37 ++-------------- .../action/main/TransportMainAction.java | 39 +---------------- .../common/settings/ClusterSettings.java | 2 - .../action/main/MainActionTests.java | 43 ------------------- .../action/main/MainResponseTests.java | 17 -------- 6 files changed, 5 insertions(+), 155 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java index 72201084570bb..09ef90cef144d 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java @@ -33,7 +33,6 @@ package org.opensearch.client; import org.apache.http.client.methods.HttpGet; -import org.opensearch.action.main.TransportMainAction; import org.opensearch.client.core.MainResponse; import java.io.IOException; @@ -63,25 +62,4 @@ public void testInfo() throws IOException { assertTrue(versionMap.get("number").toString().startsWith(info.getVersion().getNumber())); assertEquals(versionMap.get("lucene_version"), info.getVersion().getLuceneVersion()); } - - public void testInfo_overrideResponseVersion() throws IOException { - Request overrideResponseVersionRequest = new Request("PUT", "/_cluster/settings"); - overrideResponseVersionRequest.setOptions(expectWarnings(TransportMainAction.OVERRIDE_MAIN_RESPONSE_VERSION_DEPRECATION_MESSAGE)); - overrideResponseVersionRequest.setJsonEntity("{\"persistent\":{\"compatibility\": {\"override_main_response_version\":true}}}"); - client().performRequest(overrideResponseVersionRequest); - - MainResponse info = highLevelClient().info(RequestOptions.DEFAULT); - assertEquals("7.10.2", info.getVersion().getNumber()); - - // Set back to default version. 
- Request resetResponseVersionRequest = new Request("PUT", "/_cluster/settings"); - resetResponseVersionRequest.setJsonEntity("{\"persistent\":{\"compatibility\": {\"override_main_response_version\":null}}}"); - client().performRequest(resetResponseVersionRequest); - - Map infoAsMap = entityAsMap(adminClient().performRequest(new Request(HttpGet.METHOD_NAME, "/"))); - @SuppressWarnings("unchecked") - Map versionMap = (Map) infoAsMap.get("version"); - info = highLevelClient().info(RequestOptions.DEFAULT); - assertTrue(versionMap.get("number").toString().startsWith(info.getVersion().getNumber())); - } } diff --git a/server/src/main/java/org/opensearch/action/main/MainResponse.java b/server/src/main/java/org/opensearch/action/main/MainResponse.java index bd4be885fa210..1f460e5dfb019 100644 --- a/server/src/main/java/org/opensearch/action/main/MainResponse.java +++ b/server/src/main/java/org/opensearch/action/main/MainResponse.java @@ -55,7 +55,6 @@ public class MainResponse extends ActionResponse implements ToXContentObject { private ClusterName clusterName; private String clusterUuid; private Build build; - private String versionNumber; public static final String TAGLINE = "The OpenSearch Project: https://opensearch.org/"; MainResponse() {} @@ -70,7 +69,6 @@ public class MainResponse extends ActionResponse implements ToXContentObject { if (in.getVersion().before(LegacyESVersion.V_7_0_0)) { in.readBoolean(); } - versionNumber = build.getQualifiedVersion(); } public MainResponse(String nodeName, Version version, ClusterName clusterName, String clusterUuid, Build build) { @@ -79,16 +77,6 @@ public MainResponse(String nodeName, Version version, ClusterName clusterName, S this.clusterName = clusterName; this.clusterUuid = clusterUuid; this.build = build; - this.versionNumber = build.getQualifiedVersion(); - } - - public MainResponse(String nodeName, Version version, ClusterName clusterName, String clusterUuid, Build build, String versionNumber) { - this.nodeName = nodeName; - this.version = version; - this.clusterName = clusterName; - this.clusterUuid = clusterUuid; - this.build = build; - this.versionNumber = versionNumber; } public String getNodeName() { @@ -111,18 +99,10 @@ public Build getBuild() { return build; } - public String getVersionNumber() { - return versionNumber; - } - @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(nodeName); - if (out.getVersion().before(Version.V_1_0_0)) { - Version.writeVersion(LegacyESVersion.V_7_10_2, out); - } else { - Version.writeVersion(version, out); - } + Version.writeVersion(version, out); clusterName.writeTo(out); out.writeString(clusterUuid); Build.writeBuild(build, out); @@ -137,11 +117,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("name", nodeName); builder.field("cluster_name", clusterName.value()); builder.field("cluster_uuid", clusterUuid); - builder.startObject("version"); - if (isCompatibilityModeDisabled()) { - builder.field("distribution", build.getDistribution()); - } - builder.field("number", versionNumber) + builder.startObject("version") + .field("distribution", build.getDistribution()) + .field("number", build.getQualifiedVersion()) .field("build_type", build.type().displayName()) .field("build_hash", build.hash()) .field("build_date", build.date()) @@ -155,12 +133,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - private boolean isCompatibilityModeDisabled() { - // if we are not in 
compatibility mode (spoofing versionNumber), then - // build.getQualifiedVersion is always used. - return build.getQualifiedVersion().equals(versionNumber); - } - private static final ObjectParser PARSER = new ObjectParser<>( MainResponse.class.getName(), true, @@ -189,7 +161,6 @@ private boolean isCompatibilityModeDisabled() { response.version = Version.fromString( ((String) value.get("number")).replace("-SNAPSHOT", "").replaceFirst("-(alpha\\d+|beta\\d+|rc\\d+)", "") ); - response.versionNumber = response.version.toString(); }, (parser, context) -> parser.map(), new ParseField("version")); } diff --git a/server/src/main/java/org/opensearch/action/main/TransportMainAction.java b/server/src/main/java/org/opensearch/action/main/TransportMainAction.java index ef6ebb27c4505..5170b23977b1e 100644 --- a/server/src/main/java/org/opensearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/opensearch/action/main/TransportMainAction.java @@ -33,7 +33,6 @@ package org.opensearch.action.main; import org.opensearch.Build; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; @@ -41,8 +40,6 @@ import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; -import org.opensearch.common.logging.DeprecationLogger; -import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.node.Node; import org.opensearch.tasks.Task; @@ -50,23 +47,8 @@ public class TransportMainAction extends HandledTransportAction { - private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(TransportMainAction.class); - - public static final String OVERRIDE_MAIN_RESPONSE_VERSION_KEY = "compatibility.override_main_response_version"; - - public static final Setting OVERRIDE_MAIN_RESPONSE_VERSION = Setting.boolSetting( - OVERRIDE_MAIN_RESPONSE_VERSION_KEY, - false, - Setting.Property.NodeScope, - Setting.Property.Dynamic - ); - - public static final String OVERRIDE_MAIN_RESPONSE_VERSION_DEPRECATION_MESSAGE = "overriding main response version" - + " number will be removed in a future version"; - private final String nodeName; private final ClusterService clusterService; - private volatile String responseVersion; @Inject public TransportMainAction( @@ -78,32 +60,13 @@ public TransportMainAction( super(MainAction.NAME, transportService, actionFilters, MainRequest::new); this.nodeName = Node.NODE_NAME_SETTING.get(settings); this.clusterService = clusterService; - setResponseVersion(OVERRIDE_MAIN_RESPONSE_VERSION.get(settings)); - - clusterService.getClusterSettings().addSettingsUpdateConsumer(OVERRIDE_MAIN_RESPONSE_VERSION, this::setResponseVersion); - } - - private void setResponseVersion(boolean isResponseVersionOverrideEnabled) { - if (isResponseVersionOverrideEnabled) { - DEPRECATION_LOGGER.deprecate(OVERRIDE_MAIN_RESPONSE_VERSION.getKey(), OVERRIDE_MAIN_RESPONSE_VERSION_DEPRECATION_MESSAGE); - this.responseVersion = LegacyESVersion.V_7_10_2.toString(); - } else { - this.responseVersion = Build.CURRENT.getQualifiedVersion(); - } } @Override protected void doExecute(Task task, MainRequest request, ActionListener listener) { ClusterState clusterState = clusterService.state(); listener.onResponse( - new MainResponse( - nodeName, - Version.CURRENT, - clusterState.getClusterName(), - clusterState.metadata().clusterUUID(), - Build.CURRENT, - 
responseVersion - ) + new MainResponse(nodeName, Version.CURRENT, clusterState.getClusterName(), clusterState.metadata().clusterUUID(), Build.CURRENT) ); } } diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 4cacc3bcf37eb..3cd9b62fc474a 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -32,7 +32,6 @@ package org.opensearch.common.settings; import org.apache.logging.log4j.LogManager; -import org.opensearch.action.main.TransportMainAction; import org.opensearch.cluster.routing.allocation.decider.NodeLoadAwareAllocationDecider; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexSettings; @@ -553,7 +552,6 @@ public void apply(Settings value, Settings current, Settings previous) { FsHealthService.REFRESH_INTERVAL_SETTING, FsHealthService.SLOW_PATH_LOGGING_THRESHOLD_SETTING, FsHealthService.HEALTHY_TIMEOUT_SETTING, - TransportMainAction.OVERRIDE_MAIN_RESPONSE_VERSION, NodeLoadAwareAllocationDecider.CLUSTER_ROUTING_ALLOCATION_LOAD_AWARENESS_PROVISIONED_CAPACITY_SETTING, NodeLoadAwareAllocationDecider.CLUSTER_ROUTING_ALLOCATION_LOAD_AWARENESS_SKEW_FACTOR_SETTING, NodeLoadAwareAllocationDecider.CLUSTER_ROUTING_ALLOCATION_LOAD_AWARENESS_ALLOW_UNASSIGNED_PRIMARIES_SETTING, diff --git a/server/src/test/java/org/opensearch/action/main/MainActionTests.java b/server/src/test/java/org/opensearch/action/main/MainActionTests.java index 3cbb6b3eb29bd..479e36c2e13ce 100644 --- a/server/src/test/java/org/opensearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/opensearch/action/main/MainActionTests.java @@ -32,7 +32,6 @@ package org.opensearch.action.main; -import org.opensearch.LegacyESVersion; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.cluster.ClusterName; @@ -56,7 +55,6 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import static org.opensearch.action.main.TransportMainAction.OVERRIDE_MAIN_RESPONSE_VERSION_KEY; public class MainActionTests extends OpenSearchTestCase { @@ -130,45 +128,4 @@ public void onFailure(Exception e) { assertNotNull(responseRef.get()); verify(clusterService, times(1)).state(); } - - public void testMainResponseVersionOverrideEnabledByConfigSetting() { - final ClusterName clusterName = new ClusterName("opensearch"); - ClusterState state = ClusterState.builder(clusterName).blocks(mock(ClusterBlocks.class)).build(); - - final ClusterService clusterService = mock(ClusterService.class); - when(clusterService.state()).thenReturn(state); - when(clusterService.getClusterSettings()).thenReturn( - new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) - ); - - TransportService transportService = new TransportService( - Settings.EMPTY, - mock(Transport.class), - null, - TransportService.NOOP_TRANSPORT_INTERCEPTOR, - x -> null, - null, - Collections.emptySet() - ); - - final Settings settings = Settings.builder().put("node.name", "my-node").put(OVERRIDE_MAIN_RESPONSE_VERSION_KEY, true).build(); - - TransportMainAction action = new TransportMainAction(settings, transportService, mock(ActionFilters.class), clusterService); - AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(mock(Task.class), new MainRequest(), new 
ActionListener() { - @Override - public void onResponse(MainResponse mainResponse) { - responseRef.set(mainResponse); - } - - @Override - public void onFailure(Exception e) { - logger.error("unexpected error", e); - } - }); - - final MainResponse mainResponse = responseRef.get(); - assertEquals(LegacyESVersion.V_7_10_2.toString(), mainResponse.getVersionNumber()); - assertWarnings(TransportMainAction.OVERRIDE_MAIN_RESPONSE_VERSION_DEPRECATION_MESSAGE); - } } diff --git a/server/src/test/java/org/opensearch/action/main/MainResponseTests.java b/server/src/test/java/org/opensearch/action/main/MainResponseTests.java index 6e2dbe4399410..b08f08a6d16bf 100644 --- a/server/src/test/java/org/opensearch/action/main/MainResponseTests.java +++ b/server/src/test/java/org/opensearch/action/main/MainResponseTests.java @@ -33,7 +33,6 @@ package org.opensearch.action.main; import org.opensearch.Build; -import org.opensearch.LegacyESVersion; import org.opensearch.Version; import org.opensearch.cluster.ClusterName; import org.opensearch.common.Strings; @@ -138,22 +137,6 @@ public void testToXContent() throws IOException { ); } - public void toXContent_overrideMainResponseVersion() throws IOException { - String responseVersion = LegacyESVersion.V_7_10_2.toString(); - MainResponse response = new MainResponse( - "nodeName", - Version.CURRENT, - new ClusterName("clusterName"), - randomAlphaOfLengthBetween(10, 20), - Build.CURRENT, - responseVersion - ); - XContentBuilder builder = XContentFactory.jsonBuilder(); - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertTrue(Strings.toString(builder).contains("\"number\":\"" + responseVersion + "\",")); - assertFalse(Strings.toString(builder).contains("\"distribution\":\"" + Build.CURRENT.getDistribution() + "\",")); - } - @Override protected MainResponse mutateInstance(MainResponse mutateInstance) { String clusterUuid = mutateInstance.getClusterUuid(); From 0801a9c18a53aabf14b9925e3e661589ea128c31 Mon Sep 17 00:00:00 2001 From: Poojita Raj Date: Thu, 21 Apr 2022 14:19:57 -0700 Subject: [PATCH 39/41] Add release notes for 1.3.1 (#3029) Signed-off-by: Poojita Raj --- .../opensearch.release-notes-1.3.1.md | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 release-notes/opensearch.release-notes-1.3.1.md diff --git a/release-notes/opensearch.release-notes-1.3.1.md b/release-notes/opensearch.release-notes-1.3.1.md new file mode 100644 index 0000000000000..04e2933303fc2 --- /dev/null +++ b/release-notes/opensearch.release-notes-1.3.1.md @@ -0,0 +1,21 @@ +## Version 1.3.1 Release Notes + +* __Exclude man page symlink in distribution (#2602)__ + + [Andrew Ross](mailto:andrross@amazon.com) - Fri, 25 Mar 2022 18:36:36 -0400 + + This is a short-term solution to unblock the build process for the 1.3 + + release. A tool used in that process (cpio) is failing on a symlink in + the JDK + man pages, so this is a hack to just remove that symlink. See + issue #2517 for + more details. 
+ Signed-off-by: Andrew Ross <andrross@amazon.com> + +* __Bump the version to 1.3.1 (#2509)__ + + [Zelin Hao](mailto:87548827+zelinh@users.noreply.github.com) - Mon, 21 Mar 2022 10:30:00 -0400 + + + Signed-off-by: Zelin Hao <zelinhao@amazon.com> From 0cbd47c799b602c7bb8a68a1dea0d6677c564733 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Thu, 21 Apr 2022 17:21:51 -0400 Subject: [PATCH 40/41] Remove JavaVersion in favour of standard Runtime.Version (java-version-checker) (#3027) Signed-off-by: Andriy Redko --- .../java_version_checker/JavaVersion.java | 83 ------------------- .../JavaVersionChecker.java | 17 ++-- .../tools/launchers/JvmErgonomics.java | 14 ---- .../tools/launchers/JvmOptionsParser.java | 4 +- .../tools/launchers/SystemJvmOptions.java | 12 +-- .../tools/launchers/JvmErgonomicsTests.java | 6 -- 6 files changed, 9 insertions(+), 127 deletions(-) delete mode 100644 distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersion.java diff --git a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersion.java b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersion.java deleted file mode 100644 index 7873f29fdff69..0000000000000 --- a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersion.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.tools.java_version_checker; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; - -public class JavaVersion { - - public static final List CURRENT = parse(System.getProperty("java.specification.version")); - public static final List JAVA_8 = parse("1.8"); - public static final List JAVA_11 = parse("11"); - - static List parse(final String value) { - if (!value.matches("^0*[0-9]+(\\.[0-9]+)*$")) { - throw new IllegalArgumentException(value); - } - - final List version = new ArrayList(); - final String[] components = value.split("\\."); - for (final String component : components) { - version.add(Integer.valueOf(component)); - } - return version; - } - - public static int majorVersion(final List javaVersion) { - Objects.requireNonNull(javaVersion); - if (javaVersion.get(0) > 1) { - return javaVersion.get(0); - } else { - return javaVersion.get(1); - } - } - - static int compare(final List left, final List right) { - // lexicographically compare two lists, treating missing entries as zeros - final int len = Math.max(left.size(), right.size()); - for (int i = 0; i < len; i++) { - final int l = (i < left.size()) ? left.get(i) : 0; - final int r = (i < right.size()) ? right.get(i) : 0; - if (l < r) { - return -1; - } - if (r < l) { - return 1; - } - } - return 0; - } - -} diff --git a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersionChecker.java b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersionChecker.java index e8ff5f3e6f3f2..eb5c7cf1592e7 100644 --- a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersionChecker.java +++ b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/JavaVersionChecker.java @@ -32,18 +32,19 @@ package org.opensearch.tools.java_version_checker; +import java.lang.Runtime.Version; import java.util.Arrays; import java.util.Locale; /** - * Simple program that checks if the runtime Java version is at least 1.8. + * Simple program that checks if the runtime Java version is at least 11 */ final class JavaVersionChecker { private JavaVersionChecker() {} /** - * The main entry point. The exit code is 0 if the Java version is at least 1.8, otherwise the exit code is 1. + * The main entry point. The exit code is 0 if the Java version is at least 11, otherwise the exit code is 1. 
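 *
 * A sketch, not code from this class: the check reduces to the standard JDK 9+
 * java.lang.Runtime.Version API, e.g.
 *   Runtime.version().compareTo(Version.parse("11")) < 0
 * is true exactly on pre-11 runtimes, so no hand-rolled version parsing is needed.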
* * @param args the args to the program which are rejected if not empty */ @@ -52,23 +53,15 @@ public static void main(final String[] args) { if (args.length != 0) { throw new IllegalArgumentException("expected zero arguments but was " + Arrays.toString(args)); } - if (JavaVersion.compare(JavaVersion.CURRENT, JavaVersion.JAVA_8) < 0) { + if (Runtime.version().compareTo(Version.parse("11")) < 0) { final String message = String.format( Locale.ROOT, - "the minimum required Java version is 8; your Java version from [%s] does not meet this requirement", + "OpenSearch requires Java 11; your Java version from [%s] does not meet this requirement", System.getProperty("java.home") ); errPrintln(message); exit(1); } - if (JavaVersion.compare(JavaVersion.CURRENT, JavaVersion.JAVA_11) < 0) { - final String message = String.format( - Locale.ROOT, - "future versions of OpenSearch will require Java 11; your Java version from [%s] does not meet this requirement", - System.getProperty("java.home") - ); - errPrintln(message); - } exit(0); } diff --git a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmErgonomics.java b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmErgonomics.java index 053d892d0ec2f..cd4bea689f776 100644 --- a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmErgonomics.java +++ b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmErgonomics.java @@ -32,8 +32,6 @@ package org.opensearch.tools.launchers; -import org.opensearch.tools.java_version_checker.JavaVersion; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -73,19 +71,7 @@ static List choose(final List userDefinedJvmOptions) throws Inte final long heapSize = extractHeapSize(finalJvmOptions); final long maxDirectMemorySize = extractMaxDirectMemorySize(finalJvmOptions); - if (System.getProperty("os.name").startsWith("Windows") && JavaVersion.majorVersion(JavaVersion.CURRENT) == 8) { - Launchers.errPrintln("Warning: with JDK 8 on Windows, OpenSearch may be unable to derive correct"); - Launchers.errPrintln(" ergonomic settings due to a JDK issue (JDK-8074459). 
Please use a newer"); - Launchers.errPrintln(" version of Java."); - } - if (maxDirectMemorySize == 0 && userDefinedJvmOptions.stream().noneMatch(s -> s.startsWith("-XX:MaxDirectMemorySize"))) { - - if (System.getProperty("os.name").startsWith("Windows") && JavaVersion.majorVersion(JavaVersion.CURRENT) == 8) { - Launchers.errPrintln("Warning: MaxDirectMemorySize may have been miscalculated due to JDK-8074459."); - Launchers.errPrintln(" Please use a newer version of Java or set MaxDirectMemorySize explicitly."); - } - ergonomicChoices.add("-XX:MaxDirectMemorySize=" + heapSize / 2); } return ergonomicChoices; diff --git a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmOptionsParser.java index 7703efdc56986..533d1f7e782ba 100644 --- a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmOptionsParser.java +++ b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/JvmOptionsParser.java @@ -32,8 +32,6 @@ package org.opensearch.tools.launchers; -import org.opensearch.tools.java_version_checker.JavaVersion; - import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; @@ -183,7 +181,7 @@ List readJvmOptionsFiles(final Path config) throws IOException, JvmOptio Reader reader = new InputStreamReader(is, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(reader) ) { - parse(JavaVersion.majorVersion(JavaVersion.CURRENT), br, jvmOptions::add, invalidLines::put); + parse(Runtime.version().feature(), br, jvmOptions::add, invalidLines::put); } if (invalidLines.isEmpty() == false) { throw new JvmOptionsFileParserException(jvmOptionsFile, invalidLines); diff --git a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java index 738d57951c4ef..fc613ccdaae68 100644 --- a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java +++ b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java @@ -32,8 +32,6 @@ package org.opensearch.tools.launchers; -import org.opensearch.tools.java_version_checker.JavaVersion; - import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -86,7 +84,7 @@ static List systemJvmOptions() { } private static String maybeShowCodeDetailsInExceptionMessages() { - if (JavaVersion.majorVersion(JavaVersion.CURRENT) >= 14) { + if (Runtime.version().feature() >= 14) { return "-XX:+ShowCodeDetailsInExceptionMessages"; } else { return ""; @@ -101,14 +99,10 @@ private static String javaLocaleProviders() { * * Due to internationalization enhancements in JDK 9 OpenSearch need to set the provider to COMPAT otherwise time/date * parsing will break in an incompatible way for some date patterns and locales. 
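 * For reference: Runtime.version().feature() returns the feature-release number of
 * the running JDK (11, 14, 17, ...), which is why maybeShowCodeDetailsInExceptionMessages()
 * above can gate -XX:+ShowCodeDetailsInExceptionMessages on a plain
 * Runtime.version().feature() >= 14 comparison instead of JavaVersion.majorVersion(..).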
- * //TODO COMPAT will be deprecated in jdk14 https://bugs.openjdk.java.net/browse/JDK-8232906 + * //TODO COMPAT will be deprecated in at some point, see please https://bugs.openjdk.java.net/browse/JDK-8232906 * See also: documentation in server/org.opensearch.common.time.IsoCalendarDataProvider */ - if (JavaVersion.majorVersion(JavaVersion.CURRENT) == 8) { - return "-Djava.locale.providers=SPI,JRE"; - } else { - return "-Djava.locale.providers=SPI,COMPAT"; - } + return "-Djava.locale.providers=SPI,COMPAT"; } } diff --git a/distribution/tools/launchers/src/test/java/org/opensearch/tools/launchers/JvmErgonomicsTests.java b/distribution/tools/launchers/src/test/java/org/opensearch/tools/launchers/JvmErgonomicsTests.java index ffdf2c2898032..5a8c9841aa0fe 100644 --- a/distribution/tools/launchers/src/test/java/org/opensearch/tools/launchers/JvmErgonomicsTests.java +++ b/distribution/tools/launchers/src/test/java/org/opensearch/tools/launchers/JvmErgonomicsTests.java @@ -32,8 +32,6 @@ package org.opensearch.tools.launchers; -import org.opensearch.tools.java_version_checker.JavaVersion; - import java.io.IOException; import java.util.Arrays; import java.util.Collections; @@ -69,8 +67,6 @@ public void testExtractValidHeapSizeUsingMaxHeapSize() throws InterruptedExcepti } public void testExtractValidHeapSizeNoOptionPresent() throws InterruptedException, IOException { - // Muted for jdk8/Windows, see: https://github.com/elastic/elasticsearch/issues/47384 - assumeFalse(System.getProperty("os.name").startsWith("Windows") && JavaVersion.majorVersion(JavaVersion.CURRENT) == 8); assertThat(JvmErgonomics.extractHeapSize(JvmErgonomics.finalJvmOptions(Collections.emptyList())), greaterThan(0L)); } @@ -141,8 +137,6 @@ public void testExtractNoSystemProperties() { } public void testMaxDirectMemorySizeChoice() throws InterruptedException, IOException { - // Muted for jdk8/Windows, see: https://github.com/elastic/elasticsearch/issues/47384 - assumeFalse(System.getProperty("os.name").startsWith("Windows") && JavaVersion.majorVersion(JavaVersion.CURRENT) == 8); final Map heapMaxDirectMemorySize = new HashMap<>(); heapMaxDirectMemorySize.put("64M", Long.toString((64L << 20) / 2)); heapMaxDirectMemorySize.put("512M", Long.toString((512L << 20) / 2)); From 3d49ccead2fed77a747534c5e50096b5578981b3 Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Thu, 21 Apr 2022 16:36:01 -0500 Subject: [PATCH 41/41] [Upgrade] Lucene-9.2-snapshot (#2924) --- buildSrc/version.properties | 2 +- .../lucene-expressions-9.1.0.jar.sha1 | 1 - ...xpressions-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-analysis-icu-9.1.0.jar.sha1 | 1 - ...alysis-icu-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-analysis-kuromoji-9.1.0.jar.sha1 | 1 - ...s-kuromoji-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-analysis-nori-9.1.0.jar.sha1 | 1 - ...lysis-nori-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-analysis-phonetic-9.1.0.jar.sha1 | 1 - ...s-phonetic-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-analysis-smartcn-9.1.0.jar.sha1 | 1 - ...is-smartcn-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-analysis-stempel-9.1.0.jar.sha1 | 1 - ...is-stempel-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-analysis-morfologik-9.1.0.jar.sha1 | 1 - ...morfologik-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../search/query/QueryProfilePhaseTests.java | 71 ++++++++++++++----- .../lucene-analysis-common-9.1.0.jar.sha1 | 1 - ...sis-common-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-backward-codecs-9.1.0.jar.sha1 | 1 - 
...ard-codecs-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + server/licenses/lucene-core-9.1.0.jar.sha1 | 1 - ...ucene-core-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../licenses/lucene-grouping-9.1.0.jar.sha1 | 1 - ...e-grouping-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-highlighter-9.1.0.jar.sha1 | 1 - ...ighlighter-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + server/licenses/lucene-join-9.1.0.jar.sha1 | 1 - ...ucene-join-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + server/licenses/lucene-memory-9.1.0.jar.sha1 | 1 - ...ene-memory-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + server/licenses/lucene-misc-9.1.0.jar.sha1 | 1 - ...ucene-misc-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + server/licenses/lucene-queries-9.1.0.jar.sha1 | 1 - ...ne-queries-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-queryparser-9.1.0.jar.sha1 | 1 - ...ueryparser-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + server/licenses/lucene-sandbox-9.1.0.jar.sha1 | 1 - ...ne-sandbox-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../lucene-spatial-extras-9.1.0.jar.sha1 | 1 - ...ial-extras-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../licenses/lucene-spatial3d-9.1.0.jar.sha1 | 1 - ...-spatial3d-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + server/licenses/lucene-suggest-9.1.0.jar.sha1 | 1 - ...ne-suggest-9.2.0-snapshot-f4f1f70.jar.sha1 | 1 + .../src/main/java/org/opensearch/Version.java | 2 +- .../common/lucene/uid/VersionsTests.java | 2 +- .../search/query/QueryProfilePhaseTests.java | 67 ++++++++++++----- 49 files changed, 125 insertions(+), 63 deletions(-) delete mode 100644 modules/lang-expression/licenses/lucene-expressions-9.1.0.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analysis-icu-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analysis-nori-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-analysis-common-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-analysis-common-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-core-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-core-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 
100644 server/licenses/lucene-grouping-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-grouping-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-join-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-join-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-memory-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-memory-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-misc-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-misc-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-queries-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-queries-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-9.2.0-snapshot-f4f1f70.jar.sha1 delete mode 100644 server/licenses/lucene-suggest-9.1.0.jar.sha1 create mode 100644 server/licenses/lucene-suggest-9.2.0-snapshot-f4f1f70.jar.sha1 diff --git a/buildSrc/version.properties b/buildSrc/version.properties index d3499b0df599b..3ade56a6cab21 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ opensearch = 3.0.0 -lucene = 9.1.0 +lucene = 9.2.0-snapshot-f4f1f70 bundled_jdk_vendor = adoptium bundled_jdk = 17.0.2+8 diff --git a/modules/lang-expression/licenses/lucene-expressions-9.1.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.1.0.jar.sha1 deleted file mode 100644 index c825e197188fc..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2711abb758d101fc738c35a6867ee7559da5308b \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.2.0-snapshot-f4f1f70.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..c6f95e2bdecc7 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +f2a8008e74589f77f1d3da305cf58c88ee01d1c1 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0.jar.sha1 deleted file mode 100644 index b7733cfa9a00a..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9b429da553560fa0c363ffc04c774f957c56e14 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.2.0-snapshot-f4f1f70.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..616925e9865ed --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +f9569365e80897f1a9161254d5d2f44a44f95db8 \ No newline at end of file diff 
--git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0.jar.sha1 deleted file mode 100644 index f5b818a206e7a..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b247f8a877237b4663e4ab7d86fae21c68a58ea5 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.2.0-snapshot-f4f1f70.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..9ddb30158c6b2 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +35c14b39ff522dd6665e74a873035827b865075e \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0.jar.sha1 deleted file mode 100644 index 4d22255d10316..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -30e24b42fb0440911e702a531f4373bf397eb8c6 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.2.0-snapshot-f4f1f70.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..3326a5d35baf3 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +a84218a1ea0d5c52d6591d417061518b8a8be4e4 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0.jar.sha1 deleted file mode 100644 index a0607e6158cdd..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18a321d93836ea2856a5302d192e9dc99c647c6e \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.2.0-snapshot-f4f1f70.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..d822d33da9801 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +057bbd20b15899844b23d2cf034a167b4fe581f0 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0.jar.sha1 deleted file mode 100644 index bff959139a86c..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -41c847f39a15bb8495be8c9d8a098974be15f74b \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.2.0-snapshot-f4f1f70.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..55ef27aaff865 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +6a9a8d49e87b6999560a131e16234e46f21e6b42 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0.jar.sha1 deleted file mode 100644 index 
39d25d7872ea9..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ee7995231b181aa0a01f5aef8775562e269f5ef7 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.2.0-snapshot-f4f1f70.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..ca0f275f4772d --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +fcad3608779e0b3ab8703903b9d28cdc32767d60 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0.jar.sha1 deleted file mode 100644 index 9f07f122205d9..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -575c458431396baa7f01a546173807f27b12a087 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.2.0-snapshot-f4f1f70.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..d0f2904e43195 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +bd28479bdf3c076bc89a0d30864188d444410267 \ No newline at end of file diff --git a/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index d2cb77f529793..36c20d029e997 100644 --- a/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/sandbox/plugins/concurrent-search/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -153,7 +153,9 @@ public void testPostFilterDisablesCountOptimization() throws Exception { QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers(), queryPhaseSearcher); assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -183,7 +185,9 @@ public void testPostFilterDisablesCountOptimization() throws Exception { assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, (query) -> { - assertThat(query.getQueryName(), equalTo("MatchAllDocsQuery")); + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertThat(query.getQueryName(), equalTo("ConstantScoreQuery")); assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -265,7 +269,9 @@ public void testMinScoreDisablesCountOptimization() throws 
Exception { context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap())); QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers(), queryPhaseSearcher); assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -413,7 +419,9 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -489,27 +497,40 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); - assertProfileData(context, "BooleanQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); - assertThat(query.getProfiledChildren(), hasSize(2)); - assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery")); + // rewritten as a ConstantScoreQuery wrapping the original BooleanQuery + // see: https://github.com/apache/lucene/pull/672 + assertThat(query.getProfiledChildren(), hasSize(1)); + assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("BooleanQuery")); assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), equalTo(0L)); - assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery")); - assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L)); - 
assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), equalTo(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), equalTo(0L)); + List children = query.getProfiledChildren().get(0).getProfiledChildren(); + assertThat(children, hasSize(2)); + assertThat(children.get(0).getQueryName(), equalTo("TermQuery")); + assertThat(children.get(0).getTime(), greaterThan(0L)); + assertThat(children.get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L)); + assertThat(children.get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); + assertThat(children.get(0).getTimeBreakdown().get("score"), equalTo(0L)); + assertThat(children.get(0).getTimeBreakdown().get("score_count"), equalTo(0L)); + + assertThat(children.get(1).getQueryName(), equalTo("TermQuery")); + assertThat(children.get(1).getTime(), greaterThan(0L)); + assertThat(children.get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L)); + assertThat(children.get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); + assertThat(children.get(1).getTimeBreakdown().get("score"), equalTo(0L)); + assertThat(children.get(1).getTimeBreakdown().get("score_count"), equalTo(0L)); }, collector -> { assertThat(collector.getReason(), equalTo("search_terminate_after_count")); assertThat(collector.getTime(), greaterThan(0L)); @@ -597,7 +618,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; assertThat(fieldDoc.fields[0], equalTo(1)); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -631,7 +654,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, (query) -> { - assertThat(query.getQueryName(), equalTo("MatchAllDocsQuery")); + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertThat(query.getQueryName(), equalTo("ConstantScoreQuery")); assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -649,7 +674,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, 
"ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -666,7 +693,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -721,7 +750,9 @@ public void testIndexSortScrollOptimization() throws Exception { assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -829,7 +860,9 @@ public void testDisableTopScoreCollection() throws Exception { assertEquals(numDocs / 2, context.queryResult().topDocs().topDocs.totalHits.value); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); - assertProfileData(context, "SpanNearQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); diff --git a/server/licenses/lucene-analysis-common-9.1.0.jar.sha1 b/server/licenses/lucene-analysis-common-9.1.0.jar.sha1 deleted file mode 100644 index 4d2a9cf9451cc..0000000000000 --- a/server/licenses/lucene-analysis-common-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -240e3997fb139ff001e022124c89b686b5a8498d \ No newline at end of file diff --git a/server/licenses/lucene-analysis-common-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-analysis-common-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..5c667d1aec446 --- /dev/null +++ b/server/licenses/lucene-analysis-common-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +1f7c4b91c8ef9f65e85c5190080b3f796076f355 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.1.0.jar.sha1 b/server/licenses/lucene-backward-codecs-9.1.0.jar.sha1 deleted file mode 100644 index b6df56db28cd6..0000000000000 --- a/server/licenses/lucene-backward-codecs-9.1.0.jar.sha1 +++ 
/dev/null @@ -1 +0,0 @@ -de23bdacb09e8b39cbe876ff79c7a5b2ecc1faa6 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-backward-codecs-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..b8e8fc5a9e529 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +e3ee195405dc0cb249fe2eb3f3a6a848c4686645 \ No newline at end of file diff --git a/server/licenses/lucene-core-9.1.0.jar.sha1 b/server/licenses/lucene-core-9.1.0.jar.sha1 deleted file mode 100644 index 45e7ae47dae3e..0000000000000 --- a/server/licenses/lucene-core-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0375603f1dacd8266526404faf0088a2ac8ec2ff \ No newline at end of file diff --git a/server/licenses/lucene-core-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-core-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..dd6af54584cc3 --- /dev/null +++ b/server/licenses/lucene-core-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +f1cb45d20f7f23c420c56a94e9153e96bfdd6e1f \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.1.0.jar.sha1 b/server/licenses/lucene-grouping-9.1.0.jar.sha1 deleted file mode 100644 index be423fdde04f7..0000000000000 --- a/server/licenses/lucene-grouping-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -703308505e62fa7dcb0bf64fdb6d95d335941bdc \ No newline at end of file diff --git a/server/licenses/lucene-grouping-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-grouping-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..3f8cede90a0a7 --- /dev/null +++ b/server/licenses/lucene-grouping-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +29052ac8f5255c8df2bb1d3d0da94e112c181679 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.1.0.jar.sha1 b/server/licenses/lucene-highlighter-9.1.0.jar.sha1 deleted file mode 100644 index c130c27ed4c37..0000000000000 --- a/server/licenses/lucene-highlighter-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7f1925f6ef985000399a277ca17b8f67d3056838 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-highlighter-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..161099cac2dda --- /dev/null +++ b/server/licenses/lucene-highlighter-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +d710569c00d561c70d8290de4c4c15fe9735f94f \ No newline at end of file diff --git a/server/licenses/lucene-join-9.1.0.jar.sha1 b/server/licenses/lucene-join-9.1.0.jar.sha1 deleted file mode 100644 index b678051ddaf26..0000000000000 --- a/server/licenses/lucene-join-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e7d39da8e623c99ee8da8bcc0185b2d908aca4b3 \ No newline at end of file diff --git a/server/licenses/lucene-join-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-join-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..5a256ed582f53 --- /dev/null +++ b/server/licenses/lucene-join-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +0f3081b32664d8ca6318e69dd054befb5f9a334b \ No newline at end of file diff --git a/server/licenses/lucene-memory-9.1.0.jar.sha1 b/server/licenses/lucene-memory-9.1.0.jar.sha1 deleted file mode 100644 index a07b052e9c332..0000000000000 --- a/server/licenses/lucene-memory-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -209166fd48dae3261ccf26990fe600332b8fb373 \ No newline at end of file diff --git 
a/server/licenses/lucene-memory-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-memory-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..9504a0e4b2cf8 --- /dev/null +++ b/server/licenses/lucene-memory-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +81b63e23b87c054c140ff6a1e2e6696ca750d51c \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.1.0.jar.sha1 b/server/licenses/lucene-misc-9.1.0.jar.sha1 deleted file mode 100644 index 8627e481c6214..0000000000000 --- a/server/licenses/lucene-misc-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -905d93b6389060cf4b0cb464ffa8fa2db81b60e7 \ No newline at end of file diff --git a/server/licenses/lucene-misc-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-misc-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..7011446c479a4 --- /dev/null +++ b/server/licenses/lucene-misc-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +0cdea200c1890b877d26ce58b7d797f122bb8328 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.1.0.jar.sha1 b/server/licenses/lucene-queries-9.1.0.jar.sha1 deleted file mode 100644 index 9e81da7ca5c15..0000000000000 --- a/server/licenses/lucene-queries-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c50fc971573910ea239ee6f275e9257b6b6bdd48 \ No newline at end of file diff --git a/server/licenses/lucene-queries-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-queries-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..eac4f3a6abc7d --- /dev/null +++ b/server/licenses/lucene-queries-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +50d8395e3afc502f267cb308399ab783edfabec0 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.1.0.jar.sha1 b/server/licenses/lucene-queryparser-9.1.0.jar.sha1 deleted file mode 100644 index fb04adf2051d0..0000000000000 --- a/server/licenses/lucene-queryparser-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -383eb69b12f9d9c98c44237155f50c870c9a34b9 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-queryparser-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..b00bd083b9e4f --- /dev/null +++ b/server/licenses/lucene-queryparser-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +815b394c8be5cbb9673011953da38d39a843b0fa \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.1.0.jar.sha1 b/server/licenses/lucene-sandbox-9.1.0.jar.sha1 deleted file mode 100644 index 429a84de46f3c..0000000000000 --- a/server/licenses/lucene-sandbox-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0c728684e750a63f881998fbe27afd897f739762 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-sandbox-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..9f26af782d88c --- /dev/null +++ b/server/licenses/lucene-sandbox-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +7bb7c539172dc3513d4f34e7f29d2cd3a0352361 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.1.0.jar.sha1 b/server/licenses/lucene-spatial-extras-9.1.0.jar.sha1 deleted file mode 100644 index 7078cbc05fff7..0000000000000 --- a/server/licenses/lucene-spatial-extras-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -94d7d107c399cd11d407b94fa62f5677fe86f63b \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-9.2.0-snapshot-f4f1f70.jar.sha1 
b/server/licenses/lucene-spatial-extras-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..8cf21bea02089 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +7bc2f2e37f866e3b376f083e4b7cc89a8cb45fd0 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.1.0.jar.sha1 b/server/licenses/lucene-spatial3d-9.1.0.jar.sha1 deleted file mode 100644 index 604e8ed054ac1..0000000000000 --- a/server/licenses/lucene-spatial3d-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7717b300bc14dfa9eb4b7d5970d8e25a60010e64 \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-spatial3d-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..1407cc666b3c3 --- /dev/null +++ b/server/licenses/lucene-spatial3d-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +a5f79bb1f8337dbe6fc50fc5abd46d4eaaf4d433 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.1.0.jar.sha1 b/server/licenses/lucene-suggest-9.1.0.jar.sha1 deleted file mode 100644 index 4562a19706634..0000000000000 --- a/server/licenses/lucene-suggest-9.1.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -957fca507eba94dbc3ef0d02377839be49bbe619 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-9.2.0-snapshot-f4f1f70.jar.sha1 b/server/licenses/lucene-suggest-9.2.0-snapshot-f4f1f70.jar.sha1 new file mode 100644 index 0000000000000..30772395b4cda --- /dev/null +++ b/server/licenses/lucene-suggest-9.2.0-snapshot-f4f1f70.jar.sha1 @@ -0,0 +1 @@ +9e74f66171ea215e8c4936588381c0950c290c80 \ No newline at end of file diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index e68305df20869..ad3546b2498f2 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -83,7 +83,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_3_2 = new Version(1030299, org.apache.lucene.util.Version.LUCENE_8_10_1); public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_9_1_0); public static final Version V_2_1_0 = new Version(2010099, org.apache.lucene.util.Version.LUCENE_9_1_0); - public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_1_0); + public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_2_0); public static final Version CURRENT = V_3_0_0; public static Version readVersion(StreamInput in) throws IOException { diff --git a/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java b/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java index dace484f80c2b..8b22ff56e9abc 100644 --- a/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java +++ b/server/src/test/java/org/opensearch/common/lucene/uid/VersionsTests.java @@ -224,7 +224,7 @@ public void testLuceneVersionOnUnknownVersions() { assertEquals(VersionUtils.getFirstVersion().luceneVersion.major - 1, version.luceneVersion.major); // future version, should be the same version as today - version = Version.fromString("2.77.1"); + version = Version.fromString(Version.CURRENT.major + ".77.1"); assertEquals(Version.CURRENT.luceneVersion, version.luceneVersion); } } diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java 
b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index dfa41edb5cff2..1b168e7d5b16c 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -127,7 +127,7 @@ public void testPostFilterDisablesCountOptimization() throws Exception { QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers()); assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertProfileData(context, "MatchAllDocsQuery", query -> { + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -157,7 +157,7 @@ public void testPostFilterDisablesCountOptimization() throws Exception { assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, (query) -> { - assertThat(query.getQueryName(), equalTo("MatchAllDocsQuery")); + assertThat(query.getQueryName(), equalTo("ConstantScoreQuery")); assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -239,7 +239,9 @@ public void testMinScoreDisablesCountOptimization() throws Exception { context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap())); QueryPhase.executeInternal(context.withCleanQueryResult().withProfilers()); assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -387,7 +389,9 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertTrue(context.queryResult().terminatedEarly()); assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -463,27 +467,40 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); - assertProfileData(context, "BooleanQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { 
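// The zeroed score/score_count entries asserted below follow from this test never
// scoring documents: the search terminates early with no top docs requested, so only
// create_weight records any activity in the profile breakdown.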
assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); - assertThat(query.getProfiledChildren(), hasSize(2)); - assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("TermQuery")); + // rewritten as a ConstantScoreQuery wrapping the original BooleanQuery + // see: https://github.com/apache/lucene/pull/672 + assertThat(query.getProfiledChildren(), hasSize(1)); + assertThat(query.getProfiledChildren().get(0).getQueryName(), equalTo("BooleanQuery")); assertThat(query.getProfiledChildren().get(0).getTime(), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), equalTo(0L)); - assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery")); - assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), equalTo(0L)); - assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), equalTo(0L)); + List children = query.getProfiledChildren().get(0).getProfiledChildren(); + assertThat(children, hasSize(2)); + assertThat(children.get(0).getQueryName(), equalTo("TermQuery")); + assertThat(children.get(0).getTime(), greaterThan(0L)); + assertThat(children.get(0).getTimeBreakdown().get("create_weight"), greaterThan(0L)); + assertThat(children.get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); + assertThat(children.get(0).getTimeBreakdown().get("score"), equalTo(0L)); + assertThat(children.get(0).getTimeBreakdown().get("score_count"), equalTo(0L)); + + assertThat(children.get(1).getQueryName(), equalTo("TermQuery")); + assertThat(children.get(1).getTime(), greaterThan(0L)); + assertThat(children.get(1).getTimeBreakdown().get("create_weight"), greaterThan(0L)); + assertThat(children.get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); + assertThat(children.get(1).getTimeBreakdown().get("score"), equalTo(0L)); + assertThat(children.get(1).getTimeBreakdown().get("score_count"), equalTo(0L)); }, collector -> { assertThat(collector.getReason(), equalTo("search_terminate_after_count")); assertThat(collector.getTime(), greaterThan(0L)); @@ -571,7 +588,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; assertThat(fieldDoc.fields[0], equalTo(1)); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, 
"ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -605,7 +624,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, (query) -> { - assertThat(query.getQueryName(), equalTo("MatchAllDocsQuery")); + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertThat(query.getQueryName(), equalTo("ConstantScoreQuery")); assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -623,7 +644,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -640,7 +663,9 @@ public void testIndexSortingEarlyTermination() throws Exception { assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); assertThat(fieldDoc.fields[0], anyOf(equalTo(1), equalTo(2))); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -695,7 +720,9 @@ public void testIndexSortScrollOptimization() throws Exception { assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); - assertProfileData(context, "MatchAllDocsQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); @@ -806,7 +833,9 @@ public void testDisableTopScoreCollection() throws Exception { assertEquals(numDocs / 2, context.queryResult().topDocs().topDocs.totalHits.value); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); 
assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); - assertProfileData(context, "SpanNearQuery", query -> { + // IndexSearcher#rewrite optimizes by rewriting non-scoring queries to ConstantScoreQuery + // see: https://github.com/apache/lucene/pull/672 + assertProfileData(context, "ConstantScoreQuery", query -> { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), equalTo(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L));
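Taken together, the QueryProfilePhaseTests changes in this patch all encode the same Lucene 9.2 behavior change: the profiler now reports a synthetic ConstantScoreQuery at the root of the profile tree, with the original non-scoring query as its only child. A minimal sketch of the reshaped assertion pattern follows; it reuses only the fixture names visible in the diffs above (assertProfileData, ProfileResult, and the Hamcrest matchers) and is illustrative rather than lifted verbatim from the patch:

    // Sketch: the old top-level assertions move one level down the profile tree.
    assertProfileData(context, "ConstantScoreQuery", query -> {
        assertThat(query.getTimeBreakdown().get("score_count"), equalTo(0L)); // still never scored
        assertThat(query.getProfiledChildren(), hasSize(1));                  // the wrapping layer
        ProfileResult child = query.getProfiledChildren().get(0);
        assertThat(child.getQueryName(), equalTo("BooleanQuery"));            // the original query
        assertThat(child.getTimeBreakdown().get("create_weight_count"), equalTo(1L));
    }, collector -> assertThat(collector.getReason(), equalTo("search_terminate_after_count")));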