From 7dc2cc6b7f68ca7bbdb138fc239dda8204df4556 Mon Sep 17 00:00:00 2001
From: Mary Gouseti
Date: Thu, 21 Nov 2024 14:55:38 +0100
Subject: [PATCH 01/50] IndexNameExpressionResolver refactoring (#116085)

* Refactor DateMathExpressionResolver. In this commit we reduce the scope of the DateMathExpressionResolver to handle only one expression at a time. This simplifies the code, since it separates the preprocessing from the date math calculation. Furthermore, we simplify the API so it does not need a context. Finally, the reduced scope allowed us to reduce the test footprint. The tests now target only single-expression date math resolution; any tests with expression combinations will be moved to the IndexNameExpressionResolverTests.

* Create SystemResourceAccess. In this class we collect all the access checks related to system indices. These checks are not straightforward, and different rules apply in different parts of the code. In this PR, we just collect them in one place to allow further analysis to determine whether these differences are a feature or a bug.

* Refactor WildcardExpressionResolver. In this PR we reduced the scope of the WildcardExpressionResolver to resolve one expression at a time; it also still supports `*`. This allows us to reduce the scope of the tests as well. Furthermore, we switched from streams to more imperative code to reduce object creation.

* Refactor expression resolution to resources. In this PR we bring all the previous steps together. We change the expression resolution: instead of processing lists of expressions, we completely resolve one expression to its resources before moving to the next. This is intended to increase the maintainability of the code, because it is easier to debug and it reduces code duplication when dealing with exclusions and other pre-processing tasks.

* Fix format

* Bug fix: do the empty check on wildcard expressions on each wildcard

* Polishing

* Optimise for no wildcards

* Fix test name typo

* Replace for-each loops with for-i loops

---------

Co-authored-by: Elastic Machine
Co-authored-by: James Baiera
---
 .../metadata/IndexNameExpressionResolver.java | 835 ++++++++++--------
 .../DateMathExpressionResolverTests.java      | 201 ++---
 .../IndexNameExpressionResolverTests.java     | 182 ++--
 .../WildcardExpressionResolverTests.java      | 306 ++-----
 .../core/ilm/GenerateSnapshotNameStep.java    |   6 +-
 .../ilm/GenerateSnapshotNameStepTests.java    |  10 +-
 6 files changed, 677 insertions(+), 863 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
index bf80c38d64a4e..279243eeff7cf 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java
@@ -48,17 +48,24 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.SortedMap;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.function.LongSupplier;
 import java.util.function.Predicate;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;

+/**
+ * The main focus of this class is to resolve multi-syntax target expressions to resources or concrete indices.
This resolution is influenced + * by IndicesOptions and other flags passed through the method call. Examples of the functionality it provides: + * - Resolve expressions to concrete indices + * - Resolve expressions to data stream names + * - Resolve expressions to resources (meaning indices, data streams and aliases) + * Note: This class is performance sensitive, so we pay extra attention on the data structure usage and we avoid streams and iterators + * when possible in favor of the classic for-i loops. + */ public class IndexNameExpressionResolver { private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(IndexNameExpressionResolver.class); @@ -190,7 +197,7 @@ public List dataStreamNames(ClusterState state, IndicesOptions options, getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); return expressions.stream() .map(x -> state.metadata().getIndicesLookup().get(x)) .filter(Objects::nonNull) @@ -220,7 +227,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit getNetNewSystemIndexPredicate() ); - final Collection expressions = resolveExpressions(context, request.index()); + final Collection expressions = resolveExpressionsToResources(context, request.index()); if (expressions.size() == 1) { IndexAbstraction ia = state.metadata().getIndicesLookup().get(expressions.iterator().next()); @@ -236,7 +243,7 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit ); } } - checkSystemIndexAccess(context, Set.of(ia.getWriteIndex())); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, ia.getWriteIndex()); return ia; } else { throw new IllegalArgumentException( @@ -245,30 +252,110 @@ public IndexAbstraction resolveWriteIndexAbstraction(ClusterState state, DocWrit } } - protected static Collection resolveExpressions(Context context, String... expressions) { - if (context.getOptions().expandWildcardExpressions() == false) { + /** + * Resolve the expression to the set of indices, aliases, and, optionally, data streams that the expression matches. + * If {@param preserveDataStreams} is {@code true}, data streams that are covered by the wildcards from the + * {@param expressions} are returned as-is, without expanding them further to their respective backing indices. + */ + protected static Collection resolveExpressionsToResources(Context context, String... expressions) { + // If we do not expand wildcards, then empty or _all expression result in an empty list + boolean expandWildcards = context.getOptions().expandWildcardExpressions(); + if (expandWildcards == false) { if (expressions == null || expressions.length == 0 || expressions.length == 1 && Metadata.ALL.equals(expressions[0])) { return List.of(); - } else { - return ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ); } } else { if (expressions == null || expressions.length == 0 || expressions.length == 1 && (Metadata.ALL.equals(expressions[0]) || Regex.isMatchAllPattern(expressions[0]))) { return WildcardExpressionResolver.resolveAll(context); + } else if (isNoneExpression(expressions)) { + return List.of(); + } + } + + // Using ArrayList when we know we do not have wildcards is an optimisation, given that one expression result in 0 or 1 resources. 
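+ // Illustrative walk-through (hypothetical expressions, not part of this change): given
+ // ["logs-*", "-logs-old", "metrics"], the wildcard "logs-*" expands to all matching resources,
+ // "-logs-old" is treated as an exclusion because a wildcard was already seen, and "metrics"
+ // is validated and added as a plain name. In ["-logs-old", "metrics"] the leading "-" is not
+ // an exclusion, since no wildcard precedes it, and the name is looked up literally.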
+ Collection resources = expandWildcards && WildcardExpressionResolver.hasWildcards(expressions) + ? new LinkedHashSet<>() + : new ArrayList<>(expressions.length); + boolean wildcardSeen = false; + for (int i = 0, n = expressions.length; i < n; i++) { + String originalExpression = expressions[i]; + + // Resolve exclusion, a `-` prefixed expression is an exclusion only if it succeeds a wildcard. + boolean isExclusion = wildcardSeen && originalExpression.startsWith("-"); + String baseExpression = isExclusion ? originalExpression.substring(1) : originalExpression; + + // Resolve date math + baseExpression = DateMathExpressionResolver.resolveExpression(baseExpression, context::getStartTime); + + // Validate base expression + validateResourceExpression(context, baseExpression, expressions); + + // Check if it's wildcard + boolean isWildcard = expandWildcards && WildcardExpressionResolver.isWildcard(originalExpression); + wildcardSeen |= isWildcard; + + if (isWildcard) { + Set matchingResources = WildcardExpressionResolver.matchWildcardToResources(context, baseExpression); + + if (context.getOptions().allowNoIndices() == false && matchingResources.isEmpty()) { + throw notFoundException(baseExpression); + } + + if (isExclusion) { + resources.removeAll(matchingResources); + } else { + resources.addAll(matchingResources); + } } else { - return WildcardExpressionResolver.resolve( - context, - ExplicitResourceNameFilter.filterUnavailable( - context, - DateMathExpressionResolver.resolve(context, Arrays.asList(expressions)) - ) - ); + if (isExclusion) { + resources.remove(baseExpression); + } else if (ensureAliasOrIndexExists(context, baseExpression)) { + resources.add(baseExpression); + } + } + } + return resources; + } + + /** + * Validates the requested expression by performing the following checks: + * - Ensure it's not empty + * - Ensure it doesn't start with `_` + * - Ensure it's not a remote expression unless the allow unavailable targets is enabled. + */ + private static void validateResourceExpression(Context context, String current, String[] expressions) { + if (Strings.isEmpty(current)) { + throw notFoundException(current); + } + // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API + // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, + // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown + // if the expression can't be found. + if (current.charAt(0) == '_') { + throw new InvalidIndexNameException(current, "must not start with '_'."); + } + ensureRemoteExpressionRequireIgnoreUnavailable(context.getOptions(), current, expressions); + } + + /** + * Throws an exception if the expression is a remote expression and we do not allow unavailable targets + */ + private static void ensureRemoteExpressionRequireIgnoreUnavailable(IndicesOptions options, String current, String[] expressions) { + if (options.ignoreUnavailable()) { + return; + } + if (RemoteClusterAware.isRemoteIndexName(current)) { + List crossClusterIndices = new ArrayList<>(); + for (int i = 0; i < expressions.length; i++) { + if (RemoteClusterAware.isRemoteIndexName(expressions[i])) { + crossClusterIndices.add(expressions[i]); + } } + throw new IllegalArgumentException( + "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices + ); } } @@ -341,7 +428,7 @@ String[] concreteIndexNames(Context context, String... 
indexExpressions) { } Index[] concreteIndices(Context context, String... indexExpressions) { - final Collection expressions = resolveExpressions(context, indexExpressions); + final Collection expressions = resolveExpressionsToResources(context, indexExpressions); final Set concreteIndicesResult = Sets.newLinkedHashSetWithExpectedSize(expressions.size()); final Map indicesLookup = context.getState().metadata().getIndicesLookup(); @@ -395,7 +482,9 @@ Index[] concreteIndices(Context context, String... indexExpressions) { && context.getOptions().includeFailureIndices()) { // Collect the data streams involved Set aliasDataStreams = new HashSet<>(); - for (Index index : indexAbstraction.getIndices()) { + List indices = indexAbstraction.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); aliasDataStreams.add(indicesLookup.get(index.getName()).getParentDataStream()); } for (DataStream dataStream : aliasDataStreams) { @@ -416,13 +505,16 @@ Index[] concreteIndices(Context context, String... indexExpressions) { if (context.getOptions().allowNoIndices() == false && concreteIndicesResult.isEmpty()) { throw notFoundException(indexExpressions); } - checkSystemIndexAccess(context, concreteIndicesResult); - return concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + Index[] resultArray = concreteIndicesResult.toArray(Index.EMPTY_ARRAY); + SystemResourceAccess.checkSystemIndexAccess(context, threadContext, resultArray); + return resultArray; } private static void resolveIndicesForDataStream(Context context, DataStream dataStream, Set concreteIndicesResult) { if (shouldIncludeRegularIndices(context.getOptions())) { - for (Index index : dataStream.getIndices()) { + List indices = dataStream.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -431,7 +523,9 @@ private static void resolveIndicesForDataStream(Context context, DataStream data if (shouldIncludeFailureIndices(context.getOptions())) { // We short-circuit here, if failure indices are not allowed and they can be skipped if (context.getOptions().allowFailureIndices() || context.getOptions().ignoreUnavailable() == false) { - for (Index index : dataStream.getFailureIndices().getIndices()) { + List failureIndices = dataStream.getFailureIndices().getIndices(); + for (int i = 0, n = failureIndices.size(); i < n; i++) { + Index index = failureIndices.get(i); if (shouldTrackConcreteIndex(context, index)) { concreteIndicesResult.add(index); } @@ -482,64 +576,6 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract return indexAbstraction.getIndices().size() > 1; } - private void checkSystemIndexAccess(Context context, Set concreteIndices) { - final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); - if (systemIndexAccessPredicate == Predicates.always()) { - return; - } - doCheckSystemIndexAccess(context, concreteIndices, systemIndexAccessPredicate); - } - - private void doCheckSystemIndexAccess(Context context, Set concreteIndices, Predicate systemIndexAccessPredicate) { - final Metadata metadata = context.getState().metadata(); - final List resolvedSystemIndices = new ArrayList<>(); - final List resolvedNetNewSystemIndices = new ArrayList<>(); - final Set resolvedSystemDataStreams = new HashSet<>(); - final SortedMap indicesLookup = metadata.getIndicesLookup(); - boolean matchedIndex = false; - for (Index concreteIndex : 
concreteIndices) { - IndexMetadata idxMetadata = metadata.index(concreteIndex); - String name = concreteIndex.getName(); - if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { - matchedIndex = true; - IndexAbstraction indexAbstraction = indicesLookup.get(name); - if (indexAbstraction.getParentDataStream() != null) { - resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); - } else if (systemIndices.isNetNewSystemIndex(name)) { - resolvedNetNewSystemIndices.add(name); - } else { - resolvedSystemIndices.add(name); - } - } - } - if (matchedIndex) { - handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices); - } - } - - private void handleMatchedSystemIndices( - List resolvedSystemIndices, - Set resolvedSystemDataStreams, - List resolvedNetNewSystemIndices - ) { - if (resolvedSystemIndices.isEmpty() == false) { - Collections.sort(resolvedSystemIndices); - deprecationLogger.warn( - DeprecationCategory.API, - "open_system_index_access", - "this request accesses system indices: {}, but in a future major version, direct access to system " - + "indices will be prevented by default", - resolvedSystemIndices - ); - } - if (resolvedSystemDataStreams.isEmpty() == false) { - throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); - } - if (resolvedNetNewSystemIndices.isEmpty() == false) { - throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); - } - } - private static IndexNotFoundException notFoundException(String... indexExpressions) { final IndexNotFoundException infe; if (indexExpressions == null @@ -568,16 +604,16 @@ private static IndexNotFoundException notFoundException(String... indexExpressio } private static boolean shouldTrackConcreteIndex(Context context, Index index) { - if (context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY - && context.netNewSystemIndexPredicate.test(index.getName())) { + if (SystemResourceAccess.isNetNewInBackwardCompatibleMode(context, index)) { // Exclude this one as it's a net-new system index, and we explicitly don't want those. return false; } + IndicesOptions options = context.getOptions(); if (DataStream.isFailureStoreFeatureFlagEnabled() && context.options.allowFailureIndices() == false) { DataStream parentDataStream = context.getState().metadata().getIndicesLookup().get(index.getName()).getParentDataStream(); if (parentDataStream != null && parentDataStream.isFailureStoreEnabled()) { if (parentDataStream.isFailureStoreIndex(index.getName())) { - if (context.options.ignoreUnavailable()) { + if (options.ignoreUnavailable()) { return false; } else { throw new FailureIndexNotSupportedException(index); @@ -587,7 +623,6 @@ private static boolean shouldTrackConcreteIndex(Context context, Index index) { } final IndexMetadata imd = context.state.metadata().index(index); if (imd.getState() == IndexMetadata.State.CLOSE) { - IndicesOptions options = context.options; if (options.forbidClosedIndices() && options.ignoreUnavailable() == false) { throw new IndexClosedException(index); } else { @@ -721,21 +756,6 @@ public boolean hasIndexAbstraction(String indexAbstraction, ClusterState state) return state.metadata().hasIndexAbstraction(resolvedAliasOrIndex); } - /** - * @return If the specified string is data math expression then this method returns the resolved expression. 
- */ - public static String resolveDateMathExpression(String dateExpression) { - return DateMathExpressionResolver.resolveExpression(dateExpression); - } - - /** - * @param time instant to consider when parsing the expression - * @return If the specified string is data math expression then this method returns the resolved expression. - */ - public static String resolveDateMathExpression(String dateExpression, long time) { - return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); - } - /** * Resolve an array of expressions to the set of indices and aliases that these expressions match. */ @@ -765,7 +785,8 @@ public Set resolveExpressions( getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - Collection resolved = resolveExpressions(context, expressions); + // unmodifiable without creating a new collection as it might contain many items + Collection resolved = resolveExpressionsToResources(context, expressions); if (resolved instanceof Set) { // unmodifiable without creating a new collection as it might contain many items return Collections.unmodifiableSet((Set) resolved); @@ -779,7 +800,7 @@ public Set resolveExpressions( * given index. *
<p>
Only aliases with filters are returned. If the indices list contains a non-filtering reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressions}. + * NOTE: The provided expressions must have been resolved already via {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] filteringAliases(ClusterState state, String index, Set resolvedExpressions) { return indexAliases(state, index, AliasMetadata::filteringRequired, DataStreamAlias::filteringRequired, false, resolvedExpressions); @@ -799,7 +820,8 @@ boolean iterateIndexAliases(int indexAliasesSize, int resolvedExpressionsSize) { * Iterates through the list of indices and selects the effective list of required aliases for the given index. *
<p>
Only aliases where the given predicate tests successfully are returned. If the indices list contains a non-required reference to * the index itself - null is returned. Returns {@code null} if no filtering is required. - *
<p>
NOTE: the provided expressions must have been resolved already via {@link #resolveExpressions}. + *
<p>
NOTE: the provided expressions must have been resolved already via + * {@link #resolveExpressionsToResources(Context, String...)}. */ public String[] indexAliases( ClusterState state, @@ -878,7 +900,8 @@ public String[] indexAliases( .toArray(AliasMetadata[]::new); } List aliases = null; - for (AliasMetadata aliasMetadata : aliasCandidates) { + for (int i = 0; i < aliasCandidates.length; i++) { + AliasMetadata aliasMetadata = aliasCandidates[i]; if (requiredAlias.test(aliasMetadata)) { // If required - add it to the list of aliases if (aliases == null) { @@ -914,7 +937,7 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab getSystemIndexAccessPredicate(), getNetNewSystemIndexPredicate() ); - final Collection resolvedExpressions = resolveExpressions(context, expressions); + final Collection resolvedExpressions = resolveExpressionsToResources(context, expressions); // TODO: it appears that this can never be true? if (isAllIndices(resolvedExpressions)) { @@ -932,7 +955,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab for (String expression : resolvedExpressions) { IndexAbstraction indexAbstraction = state.metadata().getIndicesLookup().get(expression); if (indexAbstraction != null && indexAbstraction.getType() == Type.ALIAS) { - for (Index index : indexAbstraction.getIndices()) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); String concreteIndex = index.getName(); if (norouting.contains(concreteIndex) == false) { AliasMetadata aliasMetadata = state.metadata().index(concreteIndex).getAliases().get(indexAbstraction.getName()); @@ -961,7 +985,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab continue; } if (dataStream.getIndices() != null) { - for (Index index : dataStream.getIndices()) { + for (int i = 0, n = dataStream.getIndices().size(); i < n; i++) { + Index index = dataStream.getIndices().get(i); String concreteIndex = index.getName(); routings = collectRoutings(routings, paramRouting, norouting, concreteIndex); } @@ -1006,8 +1031,8 @@ public static Map> resolveSearchRoutingAllIndices(Metadata m Set r = Sets.newHashSet(Strings.splitStringByCommaToArray(routing)); Map> routings = new HashMap<>(); String[] concreteIndices = metadata.getConcreteAllIndices(); - for (String index : concreteIndices) { - routings.put(index, r); + for (int i = 0; i < concreteIndices.length; i++) { + routings.put(concreteIndices[i], r); } return routings; } @@ -1036,6 +1061,16 @@ static boolean isExplicitAllPattern(Collection aliasesOrIndices) { return aliasesOrIndices != null && aliasesOrIndices.size() == 1 && Metadata.ALL.equals(aliasesOrIndices.iterator().next()); } + /** + * Identifies if this expression list is *,-* which effectively means a request that requests no indices. + */ + static boolean isNoneExpression(String[] expressions) { + return expressions.length == 2 && "*".equals(expressions[0]) && "-*".equals(expressions[1]); + } + + /** + * @return the system access level that will be applied in this resolution. See {@link SystemIndexAccessLevel} for details. 
+ */ public SystemIndexAccessLevel getSystemIndexAccessLevel() { final SystemIndexAccessLevel accessLevel = SystemIndices.getSystemIndexAccessLevel(threadContext); assert accessLevel != SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY @@ -1043,6 +1078,14 @@ public SystemIndexAccessLevel getSystemIndexAccessLevel() { return accessLevel; } + /** + * Determines the right predicate based on the {@link IndexNameExpressionResolver#getSystemIndexAccessLevel()}. Specifically: + * - NONE implies no access to net-new system indices and data streams + * - BACKWARDS_COMPATIBLE_ONLY allows access also to net-new system resources + * - ALL allows access to everything + * - otherwise we fall back to {@link SystemIndices#getProductSystemIndexNamePredicate(ThreadContext)} + * @return the predicate that defines the access to system indices. + */ public Predicate getSystemIndexAccessPredicate() { final SystemIndexAccessLevel systemIndexAccessLevel = getSystemIndexAccessLevel(); final Predicate systemIndexAccessLevelPredicate; @@ -1067,6 +1110,43 @@ public Predicate getNetNewSystemIndexPredicate() { return systemIndices::isNetNewSystemIndex; } + /** + * This returns `true` if the given {@param name} is of a resource that exists. + * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of + * exception. + */ + @Nullable + private static boolean ensureAliasOrIndexExists(Context context, String name) { + boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); + IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); + if (indexAbstraction == null) { + if (ignoreUnavailable) { + return false; + } else { + throw notFoundException(name); + } + } + // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) + if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { + if (ignoreUnavailable) { + return false; + } else { + throw aliasesNotSupportedException(name); + } + } + if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { + if (ignoreUnavailable) { + return false; + } else { + IndexNotFoundException infe = notFoundException(name); + // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. + infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); + throw infe; + } + } + return true; + } + public static class Context { private final ClusterState state; @@ -1242,7 +1322,7 @@ public Predicate getSystemIndexAccessPredicate() { } /** - * Resolves alias/index name expressions with wildcards into the corresponding concrete indices/aliases + * Resolves name expressions with wildcards into the corresponding concrete indices/aliases/data streams */ static final class WildcardExpressionResolver { @@ -1251,8 +1331,8 @@ private WildcardExpressionResolver() { } /** - * Returns all the indices, datastreams, and aliases, considering the open/closed, system, and hidden context parameters. - * Depending on the context, returns the names of the datastreams themselves or their backing indices. + * Returns all the indices, data streams, and aliases, considering the open/closed, system, and hidden context parameters. + * Depending on the context, returns the names of the data streams themselves or their backing indices. 
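+ * A hedged usage sketch (the context and cluster contents are hypothetical):
+ * <pre>{@code
+ * // with open indices "idx-1", "idx-2" and data stream "ds-1" in the cluster state:
+ * Collection<String> all = WildcardExpressionResolver.resolveAll(context);
+ * // -> "idx-1", "idx-2" and, depending on the options, either "ds-1" itself
+ * //    or its backing indices
+ * }</pre>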
*/ public static Collection resolveAll(Context context) { List concreteIndices = resolveEmptyOrTrivialWildcard(context); @@ -1261,16 +1341,17 @@ public static Collection resolveAll(Context context) { return concreteIndices; } - Stream ias = context.getState() + Set resolved = new HashSet<>(concreteIndices.size()); + context.getState() .metadata() .getIndicesLookup() .values() .stream() .filter(ia -> context.getOptions().expandWildcardsHidden() || ia.isHidden() == false) .filter(ia -> shouldIncludeIfDataStream(ia, context) || shouldIncludeIfAlias(ia, context)) - .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())); + .filter(ia -> ia.isSystem() == false || context.systemIndexAccessPredicate.test(ia.getName())) + .forEach(ia -> resolved.addAll(expandToOpenClosed(context, ia))); - Set resolved = expandToOpenClosed(context, ias).collect(Collectors.toSet()); resolved.addAll(concreteIndices); return resolved; } @@ -1283,73 +1364,6 @@ private static boolean shouldIncludeIfAlias(IndexAbstraction ia, IndexNameExpres return context.getOptions().ignoreAliases() == false && ia.getType() == Type.ALIAS; } - /** - * Returns all the existing resource (index, alias and datastream) names that the {@param expressions} list resolves to. - * The passed-in {@param expressions} can contain wildcards and exclusions, as well as plain resource names. - *
- * The return is a {@code Collection} (usually a {@code Set} but can also be a {@code List}, for performance reasons) of plain
- * resource names only. All the returned resources are "accessible", in the given context, i.e. the resources exist
- * and are not an alias or a datastream if the context does not permit it.
- * Wildcard expressions, depending on the context:
- * <ol>
- *   <li>might throw an exception if they don't resolve to anything</li>
- *   <li>might not resolve to hidden or system resources (but plain names can refer to hidden or system resources)</li>
- *   <li>might resolve to aliases and datastreams, and it could be (depending on the context) that their backing indices are what's
- * ultimately returned, instead of the alias or datastream name</li>
- * </ol>
- */ - public static Collection resolve(Context context, List expressions) { - // fast exit if there are no wildcards to evaluate - if (context.getOptions().expandWildcardExpressions() == false) { - return expressions; - } - int firstWildcardIndex = 0; - for (; firstWildcardIndex < expressions.size(); firstWildcardIndex++) { - String expression = expressions.get(firstWildcardIndex); - if (isWildcard(expression)) { - break; - } - } - if (firstWildcardIndex == expressions.size()) { - return expressions; - } - Set result = new HashSet<>(); - for (int i = 0; i < firstWildcardIndex; i++) { - result.add(expressions.get(i)); - } - AtomicBoolean emptyWildcardExpansion = context.getOptions().allowNoIndices() ? null : new AtomicBoolean(); - for (int i = firstWildcardIndex; i < expressions.size(); i++) { - String expression = expressions.get(i); - boolean isExclusion = i > firstWildcardIndex && expression.charAt(0) == '-'; - if (i == firstWildcardIndex || isWildcard(expression)) { - Stream matchingResources = matchResourcesToWildcard( - context, - isExclusion ? expression.substring(1) : expression - ); - Stream matchingOpenClosedNames = expandToOpenClosed(context, matchingResources); - if (emptyWildcardExpansion != null) { - emptyWildcardExpansion.set(true); - matchingOpenClosedNames = matchingOpenClosedNames.peek(x -> emptyWildcardExpansion.set(false)); - } - if (isExclusion) { - matchingOpenClosedNames.forEach(result::remove); - } else { - matchingOpenClosedNames.forEach(result::add); - } - if (emptyWildcardExpansion != null && emptyWildcardExpansion.get()) { - throw notFoundException(expression); - } - } else { - if (isExclusion) { - result.remove(expression.substring(1)); - } else { - result.add(expression); - } - } - } - return result; - } - private static IndexMetadata.State excludeState(IndicesOptions options) { final IndexMetadata.State excludeState; if (options.expandWildcardsOpen() && options.expandWildcardsClosed()) { @@ -1366,55 +1380,82 @@ private static IndexMetadata.State excludeState(IndicesOptions options) { } /** - * Given a single wildcard {@param expression}, return the {@code Stream} that contains all the resources (i.e. indices, aliases, - * and datastreams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's + * Given a single wildcard {@param expression}, return a {@code Set} that contains all the resources (i.e. indices, aliases, + * and data streams), that exist in the cluster at this moment in time, and that the wildcard "resolves" to (i.e. the resource's * name matches the {@param expression} wildcard). * The {@param context} provides the current time-snapshot view of cluster state, as well as conditions - * on whether to consider alias, datastream, system, and hidden resources. - * It does NOT consider the open or closed status of index resources. + * on whether to consider alias, data stream, system, and hidden resources. 
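+ * A hedged illustration (cluster contents are hypothetical):
+ * <pre>{@code
+ * // with open indices "test-1", "test-2" and alias "test-alias" in the cluster state:
+ * Set<String> matched = matchWildcardToResources(context, "test-*");
+ * // contains both indices and, unless the options ignore aliases, the expansion of
+ * // "test-alias" (or the alias name itself when the context preserves aliases)
+ * }</pre>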
*/ - private static Stream matchResourcesToWildcard(Context context, String wildcardExpression) { + static Set matchWildcardToResources(Context context, String wildcardExpression) { assert isWildcard(wildcardExpression); final SortedMap indicesLookup = context.getState().getMetadata().getIndicesLookup(); - Stream matchesStream; + Set matchedResources = new HashSet<>(); + // this applies an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" if (Regex.isSuffixMatchPattern(wildcardExpression)) { - // this is an initial pre-filtering in the case where the expression is a common suffix wildcard, eg "test*" - matchesStream = filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values().stream(); - } else { - matchesStream = indicesLookup.values().stream(); - if (Regex.isMatchAllPattern(wildcardExpression) == false) { - matchesStream = matchesStream.filter( - indexAbstraction -> Regex.simpleMatch(wildcardExpression, indexAbstraction.getName()) - ); + for (IndexAbstraction ia : filterIndicesLookupForSuffixWildcard(indicesLookup, wildcardExpression).values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); + } + return matchedResources; + } + // In case of match all it fetches all index abstractions + if (Regex.isMatchAllPattern(wildcardExpression)) { + for (IndexAbstraction ia : indicesLookup.values()) { + maybeAddToResult(context, wildcardExpression, ia, matchedResources); } + return matchedResources; } - if (context.getOptions().ignoreAliases()) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.getType() != Type.ALIAS); + for (IndexAbstraction indexAbstraction : indicesLookup.values()) { + if (Regex.simpleMatch(wildcardExpression, indexAbstraction.getName())) { + maybeAddToResult(context, wildcardExpression, indexAbstraction, matchedResources); + } } - if (context.includeDataStreams() == false) { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isDataStreamRelated() == false); + return matchedResources; + } + + private static void maybeAddToResult( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction, + Set matchedResources + ) { + if (shouldExpandToIndexAbstraction(context, wildcardExpression, indexAbstraction)) { + matchedResources.addAll(expandToOpenClosed(context, indexAbstraction)); } - // historic, i.e. not net-new, system indices are included irrespective of the system access predicate - // the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isSystem() == false - || (indexAbstraction.getType() != Type.DATA_STREAM - && indexAbstraction.getParentDataStream() == null - && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false) - || context.systemIndexAccessPredicate.test(indexAbstraction.getName()) - ); + } + + /** + * Checks if this index abstraction should be included because it matched the wildcard expression. 
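+ * For example (hypothetical names), with hidden wildcard expansion disabled, a hidden index
+ * {@code .watches} is still included for the expression {@code .watch*} but not for {@code *watches},
+ * because a wildcard starting with a dot may match hidden resources whose names also start with a dot.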
+ * @param context the options of this request that influence the decision if this index abstraction should be included in the result + * @param wildcardExpression the wildcard expression that matched this index abstraction + * @param indexAbstraction the index abstraction in question + * @return true, if the index abstraction should be included in the result + */ + private static boolean shouldExpandToIndexAbstraction( + Context context, + String wildcardExpression, + IndexAbstraction indexAbstraction + ) { + if (context.getOptions().ignoreAliases() && indexAbstraction.getType() == Type.ALIAS) { + return false; + } + if (context.includeDataStreams() == false && indexAbstraction.isDataStreamRelated()) { + return false; + } + + if (indexAbstraction.isSystem() + && SystemResourceAccess.shouldExpandToSystemIndexAbstraction(context, indexAbstraction) == false) { + return false; + } + if (context.getOptions().expandWildcardsHidden() == false) { - if (wildcardExpression.startsWith(".")) { - // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also - // starts with "." - matchesStream = matchesStream.filter( - indexAbstraction -> indexAbstraction.isHidden() == false || indexAbstraction.getName().startsWith(".") - ); - } else { - matchesStream = matchesStream.filter(indexAbstraction -> indexAbstraction.isHidden() == false); + // there is this behavior that hidden indices that start with "." are not hidden if the wildcard expression also + // starts with "." + if (indexAbstraction.isHidden() + && (wildcardExpression.startsWith(".") && indexAbstraction.getName().startsWith(".")) == false) { + return false; } } - return matchesStream; + return true; } private static Map filterIndicesLookupForSuffixWildcard( @@ -1430,35 +1471,39 @@ private static Map filterIndicesLookupForSuffixWildcar } /** - * Return the {@code Stream} of open and/or closed index names for the given {@param resources}. + * Return the {@code Set} of open and/or closed index names for the given {@param resources}. * Data streams and aliases are interpreted to refer to multiple indices, * then all index resources are filtered by their open/closed status. 
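+ * For example (hypothetical data stream), a data stream {@code ds} with an open backing index
+ * {@code .ds-ds-000001} and a closed one {@code .ds-ds-000002} expands to both names only when the
+ * options include both open and closed indices; if the context preserves data streams, the name
+ * {@code ds} itself is returned instead.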
*/ - private static Stream expandToOpenClosed(Context context, Stream resources) { + private static Set expandToOpenClosed(Context context, IndexAbstraction indexAbstraction) { final IndexMetadata.State excludeState = excludeState(context.getOptions()); - return resources.flatMap(indexAbstraction -> { - if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { - return Stream.of(indexAbstraction.getName()); - } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { - return Stream.of(indexAbstraction.getName()); - } else { - Stream indicesStateStream = Stream.of(); - if (shouldIncludeRegularIndices(context.getOptions())) { - indicesStateStream = indexAbstraction.getIndices().stream().map(context.state.metadata()::index); - } - if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { - DataStream dataStream = (DataStream) indexAbstraction; - indicesStateStream = Stream.concat( - indicesStateStream, - dataStream.getFailureIndices().getIndices().stream().map(context.state.metadata()::index) - ); + Set resources = new HashSet<>(); + if (context.isPreserveAliases() && indexAbstraction.getType() == Type.ALIAS) { + resources.add(indexAbstraction.getName()); + } else if (context.isPreserveDataStreams() && indexAbstraction.getType() == Type.DATA_STREAM) { + resources.add(indexAbstraction.getName()); + } else { + if (shouldIncludeRegularIndices(context.getOptions())) { + for (int i = 0, n = indexAbstraction.getIndices().size(); i < n; i++) { + Index index = indexAbstraction.getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - if (excludeState != null) { - indicesStateStream = indicesStateStream.filter(indexMeta -> indexMeta.getState() != excludeState); + } + if (indexAbstraction.getType() == Type.DATA_STREAM && shouldIncludeFailureIndices(context.getOptions())) { + DataStream dataStream = (DataStream) indexAbstraction; + for (int i = 0, n = dataStream.getFailureIndices().getIndices().size(); i < n; i++) { + Index index = dataStream.getFailureIndices().getIndices().get(i); + IndexMetadata indexMetadata = context.state.metadata().index(index); + if (indexMetadata.getState() != excludeState) { + resources.add(index.getName()); + } } - return indicesStateStream.map(indexMeta -> indexMeta.getIndex().getName()); } - }); + } + return resources; } private static List resolveEmptyOrTrivialWildcard(Context context) { @@ -1471,26 +1516,26 @@ private static List resolveEmptyOrTrivialWildcard(Context context) { } private static List resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context context, String[] allIndices) { - return Arrays.stream(allIndices).filter(name -> { - if (name.startsWith(".")) { - IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); - assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; - if (abstraction.isSystem()) { - if (context.netNewSystemIndexPredicate.test(name)) { - if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { - return false; - } else { - return context.systemIndexAccessPredicate.test(name); - } - } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { - return context.systemIndexAccessPredicate.test(name); - } - } else { - return true; - } + List filteredIndices = new 
ArrayList<>(allIndices.length); + for (int i = 0; i < allIndices.length; i++) { + if (shouldIncludeIndexAbstraction(context, allIndices[i])) { + filteredIndices.add(allIndices[i]); } + } + return filteredIndices; + } + + private static boolean shouldIncludeIndexAbstraction(Context context, String name) { + if (name.startsWith(".") == false) { return true; - }).toList(); + } + + IndexAbstraction abstraction = context.state.metadata().getIndicesLookup().get(name); + assert abstraction != null : "null abstraction for " + name + " but was in array of all indices"; + if (abstraction.isSystem() == false) { + return true; + } + return SystemResourceAccess.isSystemIndexAbstractionAccessible(context, abstraction); } private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions options, Metadata metadata) { @@ -1513,8 +1558,39 @@ private static String[] resolveEmptyOrTrivialWildcardToAllIndices(IndicesOptions return Strings.EMPTY_ARRAY; } } + + static boolean isWildcard(String expression) { + return Regex.isSimpleMatchPattern(expression); + } + + static boolean hasWildcards(String[] expressions) { + for (int i = 0; i < expressions.length; i++) { + if (isWildcard(expressions[i])) { + return true; + } + } + return false; + } + } + + /** + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression) { + return DateMathExpressionResolver.resolveExpression(dateExpression); + } + + /** + * @param time instant to consider when parsing the expression + * @return If the specified string is data math expression then this method returns the resolved expression. + */ + public static String resolveDateMathExpression(String dateExpression, long time) { + return DateMathExpressionResolver.resolveExpression(dateExpression, () -> time); } + /** + * Resolves a date math expression based on the requested time. + */ public static final class DateMathExpressionResolver { private static final DateFormatter DEFAULT_DATE_FORMATTER = DateFormatter.forPattern("uuuu.MM.dd"); @@ -1530,35 +1606,18 @@ private DateMathExpressionResolver() { } /** - * Resolves date math expressions. If this is a noop the given {@code expressions} list is returned without copying. - * As a result callers of this method should not mutate the returned list. Mutating it may come with unexpected side effects. + * Resolves a date math expression using the current time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. */ - public static List resolve(Context context, List expressions) { - boolean wildcardSeen = false; - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - String[] result = null; - for (int i = 0, n = expressions.size(); i < n; i++) { - String expression = expressions.get(i); - // accepts date-math exclusions that are of the form "-<...{}>",f i.e. the "-" is outside the "<>" date-math template - boolean isExclusion = wildcardSeen && expression.startsWith("-"); - wildcardSeen = wildcardSeen || (expandWildcards && isWildcard(expression)); - String toResolve = isExclusion ? expression.substring(1) : expression; - String resolved = resolveExpression(toResolve, context::getStartTime); - if (toResolve != resolved) { - if (result == null) { - result = expressions.toArray(Strings.EMPTY_ARRAY); - } - result[i] = isExclusion ? "-" + resolved : resolved; - } - } - return result == null ? 
expressions : Arrays.asList(result); - } - - static String resolveExpression(String expression) { + public static String resolveExpression(String expression) { return resolveExpression(expression, System::currentTimeMillis); } - static String resolveExpression(String expression, LongSupplier getTime) { + /** + * Resolves a date math expression using the provided time. This method recognises a date math expression iff when they start with + * %3C and end with %3E. Otherwise, it returns the expression intact. + */ + public static String resolveExpression(String expression, LongSupplier getTime) { if (expression.startsWith(EXPRESSION_LEFT_BOUND) == false || expression.endsWith(EXPRESSION_RIGHT_BOUND) == false) { return expression; } @@ -1707,135 +1766,133 @@ private static String doResolveExpression(String expression, LongSupplier getTim } } - public static final class ExplicitResourceNameFilter { + /** + * In this class we collect the system access relevant code. The helper methods provide the following functionalities: + * - determining the access to a system index abstraction + * - verifying the access to system abstractions and adding the necessary warnings + * - determining the access to a system index based on its name + * WARNING: we have observed differences in how the access is determined. For now this behaviour is documented and preserved. + */ + public static final class SystemResourceAccess { - private ExplicitResourceNameFilter() { + private SystemResourceAccess() { // Utility class } /** - * Returns an expression list with "unavailable" (missing or not acceptable) resource names filtered out. - * Only explicit resource names are considered for filtering. Wildcard and exclusion expressions are kept in. + * Checks if this system index abstraction should be included when resolving via {@link + * IndexNameExpressionResolver.WildcardExpressionResolver#resolveEmptyOrTrivialWildcardWithAllowedSystemIndices(Context, String[])}. + * NOTE: it behaves differently than {@link SystemResourceAccess#shouldExpandToSystemIndexAbstraction(Context, IndexAbstraction)} + * because in the case that the access level is BACKWARDS_COMPATIBLE_ONLY it does not include the net-new indices, this is + * questionable. */ - public static List filterUnavailable(Context context, List expressions) { - ensureRemoteIndicesRequireIgnoreUnavailable(context.getOptions(), expressions); - final boolean expandWildcards = context.getOptions().expandWildcardExpressions(); - boolean wildcardSeen = false; - List result = null; - for (int i = 0; i < expressions.size(); i++) { - String expression = expressions.get(i); - if (Strings.isEmpty(expression)) { - throw notFoundException(expression); - } - // Expressions can not start with an underscore. This is reserved for APIs. If the check gets here, the API - // does not exist and the path is interpreted as an expression. If the expression begins with an underscore, - // throw a specific error that is different from the [[IndexNotFoundException]], which is typically thrown - // if the expression can't be found. 
- if (expression.charAt(0) == '_') { - throw new InvalidIndexNameException(expression, "must not start with '_'."); - } - final boolean isWildcard = expandWildcards && isWildcard(expression); - if (isWildcard || (wildcardSeen && expression.charAt(0) == '-') || ensureAliasOrIndexExists(context, expression)) { - if (result != null) { - result.add(expression); - } + public static boolean isSystemIndexAbstractionAccessible(Context context, IndexAbstraction abstraction) { + assert abstraction.isSystem() : "We should only check this for system resources"; + if (context.netNewSystemIndexPredicate.test(abstraction.getName())) { + if (SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY.equals(context.systemIndexAccessLevel)) { + return false; } else { - if (result == null) { - result = new ArrayList<>(expressions.size() - 1); - result.addAll(expressions.subList(0, i)); - } + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - wildcardSeen |= isWildcard; + } else if (abstraction.getType() == Type.DATA_STREAM || abstraction.getParentDataStream() != null) { + return context.systemIndexAccessPredicate.test(abstraction.getName()); } - return result == null ? expressions : result; + return true; } /** - * This returns `true` if the given {@param name} is of a resource that exists. - * Otherwise, it returns `false` if the `ignore_unvailable` option is `true`, or, if `false`, it throws a "not found" type of - * exception. + * Historic, i.e. not net-new, system indices are included irrespective of the system access predicate + * the system access predicate is based on the endpoint kind and HTTP request headers that identify the stack feature. + * A historic system resource, can only be an index since system data streams were added later. */ - @Nullable - private static boolean ensureAliasOrIndexExists(Context context, String name) { - boolean ignoreUnavailable = context.getOptions().ignoreUnavailable(); - IndexAbstraction indexAbstraction = context.getState().getMetadata().getIndicesLookup().get(name); - if (indexAbstraction == null) { - if (ignoreUnavailable) { - return false; - } else { - throw notFoundException(name); - } - } - // treat aliases as unavailable indices when ignoreAliases is set to true (e.g. delete index and update aliases api) - if (indexAbstraction.getType() == Type.ALIAS && context.getOptions().ignoreAliases()) { - if (ignoreUnavailable) { - return false; - } else { - throw aliasesNotSupportedException(name); - } - } - if (indexAbstraction.isDataStreamRelated() && context.includeDataStreams() == false) { - if (ignoreUnavailable) { - return false; - } else { - IndexNotFoundException infe = notFoundException(name); - // Allows callers to handle IndexNotFoundException differently based on whether data streams were excluded. 
- infe.addMetadata(EXCLUDED_DATA_STREAMS_KEY, "true"); - throw infe; - } - } - return true; + private static boolean shouldExpandToSystemIndexAbstraction(Context context, IndexAbstraction indexAbstraction) { + assert indexAbstraction.isSystem() : "We should only check this for system resources"; + boolean isHistoric = indexAbstraction.getType() != Type.DATA_STREAM + && indexAbstraction.getParentDataStream() == null + && context.netNewSystemIndexPredicate.test(indexAbstraction.getName()) == false; + return isHistoric || context.systemIndexAccessPredicate.test(indexAbstraction.getName()); } - private static void ensureRemoteIndicesRequireIgnoreUnavailable(IndicesOptions options, List indexExpressions) { - if (options.ignoreUnavailable()) { + /** + * Checks if any system indices that should not have been accessible according to the + * {@link Context#getSystemIndexAccessPredicate()} are accessed, and it performs the following actions: + * - if there are historic (aka not net-new) system indices, then it adds a deprecation warning + * - if it contains net-new system indices or system data streams, it throws an exception. + */ + private static void checkSystemIndexAccess(Context context, ThreadContext threadContext, Index... concreteIndices) { + final Predicate systemIndexAccessPredicate = context.getSystemIndexAccessPredicate(); + if (systemIndexAccessPredicate == Predicates.always()) { return; } - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - failOnRemoteIndicesNotIgnoringUnavailable(indexExpressions); - } - } + doCheckSystemIndexAccess(context, systemIndexAccessPredicate, threadContext, concreteIndices); } - private static void failOnRemoteIndicesNotIgnoringUnavailable(List indexExpressions) { - List crossClusterIndices = new ArrayList<>(); - for (String index : indexExpressions) { - if (RemoteClusterAware.isRemoteIndexName(index)) { - crossClusterIndices.add(index); + private static void doCheckSystemIndexAccess( + Context context, + Predicate systemIndexAccessPredicate, + ThreadContext threadContext, + Index... concreteIndices + ) { + final Metadata metadata = context.getState().metadata(); + final List resolvedSystemIndices = new ArrayList<>(); + final List resolvedNetNewSystemIndices = new ArrayList<>(); + final Set resolvedSystemDataStreams = new HashSet<>(); + final SortedMap indicesLookup = metadata.getIndicesLookup(); + boolean matchedIndex = false; + for (int i = 0; i < concreteIndices.length; i++) { + Index concreteIndex = concreteIndices[i]; + IndexMetadata idxMetadata = metadata.index(concreteIndex); + String name = concreteIndex.getName(); + if (idxMetadata.isSystem() && systemIndexAccessPredicate.test(name) == false) { + matchedIndex = true; + IndexAbstraction indexAbstraction = indicesLookup.get(name); + if (indexAbstraction.getParentDataStream() != null) { + resolvedSystemDataStreams.add(indexAbstraction.getParentDataStream().getName()); + } else if (context.netNewSystemIndexPredicate.test(name)) { + resolvedNetNewSystemIndices.add(name); + } else { + resolvedSystemIndices.add(name); + } } } - throw new IllegalArgumentException( - "Cross-cluster calls are not supported in this context but remote indices were requested: " + crossClusterIndices - ); - } - } - - /** - * This is a context for the DateMathExpressionResolver which does not require {@code IndicesOptions} or {@code ClusterState} - * since it uses only the start time to resolve expressions. 
- */ - public static final class ResolverContext extends Context { - public ResolverContext() { - this(System.currentTimeMillis()); - } - - public ResolverContext(long startTime) { - super(null, null, startTime, false, false, false, false, SystemIndexAccessLevel.ALL, Predicates.never(), Predicates.never()); + if (matchedIndex) { + handleMatchedSystemIndices(resolvedSystemIndices, resolvedSystemDataStreams, resolvedNetNewSystemIndices, threadContext); + } } - @Override - public ClusterState getState() { - throw new UnsupportedOperationException("should never be called"); + private static void handleMatchedSystemIndices( + List resolvedSystemIndices, + Set resolvedSystemDataStreams, + List resolvedNetNewSystemIndices, + ThreadContext threadContext + ) { + if (resolvedSystemIndices.isEmpty() == false) { + Collections.sort(resolvedSystemIndices); + deprecationLogger.warn( + DeprecationCategory.API, + "open_system_index_access", + "this request accesses system indices: {}, but in a future major version, direct access to system " + + "indices will be prevented by default", + resolvedSystemIndices + ); + } + if (resolvedSystemDataStreams.isEmpty() == false) { + throw SystemIndices.dataStreamAccessException(threadContext, resolvedSystemDataStreams); + } + if (resolvedNetNewSystemIndices.isEmpty() == false) { + throw SystemIndices.netNewSystemIndexAccessException(threadContext, resolvedNetNewSystemIndices); + } } - @Override - public IndicesOptions getOptions() { - throw new UnsupportedOperationException("should never be called"); + /** + * Used in {@link IndexNameExpressionResolver#shouldTrackConcreteIndex(Context, Index)} to exclude net-new indices + * when we are in backwards compatible only access level. + * This also feels questionable as well. + */ + private static boolean isNetNewInBackwardCompatibleMode(Context context, Index index) { + return context.systemIndexAccessLevel == SystemIndexAccessLevel.BACKWARDS_COMPATIBLE_ONLY + && context.netNewSystemIndexPredicate.test(index.getName()); } } - private static boolean isWildcard(String expression) { - return Regex.isSimpleMatchPattern(expression); - } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java index 6be5b48f9d723..57c360dc6a92a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DateMathExpressionResolverTests.java @@ -10,163 +10,90 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.Context; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver.DateMathExpressionResolver; -import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; import java.time.Instant; import java.time.ZoneId; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; import java.util.Locale; +import java.util.function.LongSupplier; import static org.hamcrest.Matchers.containsString; 
import static org.hamcrest.Matchers.equalTo; public class DateMathExpressionResolverTests extends ESTestCase { - private final Context context = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictExpand(), - SystemIndexAccessLevel.NONE - ); + private final long now = randomMillisUpToYear9999(); + private final LongSupplier getTime = () -> now; - private static ZonedDateTime dateFromMillis(long millis) { - return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); - } + public void testNoDateMathExpression() { + String expression = randomAlphaOfLength(10); + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression)); - private static String formatDate(String pattern, ZonedDateTime zonedDateTime) { - DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); - return dateFormatter.format(zonedDateTime); + expression = "*"; + assertThat(DateMathExpressionResolver.resolveExpression(expression, getTime), equalTo(expression)); } - public void testNormal() throws Exception { - int numIndexExpressions = randomIntBetween(1, 9); - List indexExpressions = new ArrayList<>(numIndexExpressions); - for (int i = 0; i < numIndexExpressions; i++) { - indexExpressions.add(randomAlphaOfLength(10)); - } - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(indexExpressions.size())); - for (int i = 0; i < indexExpressions.size(); i++) { - assertThat(result.get(i), equalTo(indexExpressions.get(i))); - } - } + public void testExpression() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testExpression() throws Exception { - List indexExpressions = Arrays.asList("<.marvel-{now}>", "<.watch_history-{now}>", ""); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat(result.size(), equalTo(3)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(1), equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + result = DateMathExpressionResolver.resolveExpression("<.watch_history-{now}>", getTime); + assertThat(result, equalTo(".watch_history-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("", getTime); + assertThat(result, equalTo("logstash-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } public void testExpressionWithWildcardAndExclusions() { - List indexExpressions = Arrays.asList( - "<-before-inner-{now}>", - "-", - "", - "<-after-inner-{now}>", - "-" - ); - List result = DateMathExpressionResolver.resolve(context, indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-"), // doesn't evaluate because it doesn't start with "<" and it is not an exclusion - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - equalTo("-after-outer-" + formatDate("uuuu.MM.dd", 
dateFromMillis(context.getStartTime()))) - ) - ); - Context noWildcardExpandContext = new Context( - ClusterState.builder(new ClusterName("_name")).build(), - IndicesOptions.strictSingleIndexNoExpandForbidClosed(), - SystemIndexAccessLevel.NONE - ); - result = DateMathExpressionResolver.resolve(noWildcardExpandContext, indexExpressions); - assertThat( - result, - Matchers.contains( - equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-"), - equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + "*"), - equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()))), - // doesn't evaluate because it doesn't start with "<" and there can't be exclusions without wildcard expansion - equalTo("-") - ) - ); - } + String result = DateMathExpressionResolver.resolveExpression("<-before-inner-{now}>", getTime); + assertThat(result, equalTo("-before-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); + + result = DateMathExpressionResolver.resolveExpression("", getTime); + assertThat(result, equalTo("wild*card-" + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "*")); + + result = DateMathExpressionResolver.resolveExpression("<-after-inner-{now}>", getTime); + assertThat(result, equalTo("-after-inner-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); - public void testEmpty() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Collections.emptyList()); - assertThat(result.size(), equalTo(0)); } - public void testExpression_Static() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-test>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-test")); + public void testExpression_Static() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-test>", getTime); + assertThat(result, equalTo(".marvel-test")); } - public void testExpression_MultiParts() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.text1-{now/d}-text2-{now/M}>")); - assertThat(result.size(), equalTo(1)); + public void testExpression_MultiParts() { + String result = DateMathExpressionResolver.resolveExpression("<.text1-{now/d}-text2-{now/M}>", getTime); assertThat( - result.get(0), + result, equalTo( ".text1-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())) + + formatDate("uuuu.MM.dd", dateFromMillis(now)) + "-text2-" - + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime()).withDayOfMonth(1)) + + formatDate("uuuu.MM.dd", dateFromMillis(now).withDayOfMonth(1)) ) ); } - public void testExpression_CustomFormat() throws Exception { - List results = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{yyyy.MM.dd}}>")); - assertThat(results.size(), equalTo(1)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - } - - public void testExpression_EscapeStatic() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar\\{v\\}el-{now/d}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); + public void testExpression_CustomFormat() { + String result = 
DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{yyyy.MM.dd}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_EscapeDateFormat() throws Exception { - List result = DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{'\\{year\\}'yyyy}}>")); - assertThat(result.size(), equalTo(1)); - assertThat(result.get(0), equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(context.getStartTime())))); + public void testExpression_EscapeStatic() { + String result = DateMathExpressionResolver.resolveExpression("<.mar\\{v\\}el-{now/d}>", getTime); + assertThat(result, equalTo(".mar{v}el-" + formatDate("uuuu.MM.dd", dateFromMillis(now)))); } - public void testExpression_MixedArray() throws Exception { - List result = DateMathExpressionResolver.resolve( - context, - Arrays.asList("name1", "<.marvel-{now/d}>", "name2", "<.logstash-{now/M{uuuu.MM}}>") - ); - assertThat(result.size(), equalTo(4)); - assertThat(result.get(0), equalTo("name1")); - assertThat(result.get(1), equalTo(".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(context.getStartTime())))); - assertThat(result.get(2), equalTo("name2")); - assertThat(result.get(3), equalTo(".logstash-" + formatDate("uuuu.MM", dateFromMillis(context.getStartTime()).withDayOfMonth(1)))); + public void testExpression_EscapeDateFormat() { + String result = DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{'\\{year\\}'yyyy}}>", getTime); + assertThat(result, equalTo(".marvel-" + formatDate("'{year}'yyyy", dateFromMillis(now)))); } - public void testExpression_CustomTimeZoneInIndexName() throws Exception { + public void testExpression_CustomTimeZoneInIndexName() { ZoneId timeZone; int hoursOffset; int minutesOffset = 0; @@ -194,57 +121,57 @@ public void testExpression_CustomTimeZoneInIndexName() throws Exception { // rounding to today 00:00 now = ZonedDateTime.now(ZoneOffset.UTC).withHour(0).withMinute(0).withSecond(0); } - Context context = new Context( - this.context.getState(), - this.context.getOptions(), - now.toInstant().toEpochMilli(), - SystemIndexAccessLevel.NONE, - name -> false, - name -> false - ); - List results = DateMathExpressionResolver.resolve( - context, - Arrays.asList("<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>") + + String result = DateMathExpressionResolver.resolveExpression( + "<.marvel-{now/d{yyyy.MM.dd|" + timeZone.getId() + "}}>", + () -> now.toInstant().toEpochMilli() ); - assertThat(results.size(), equalTo(1)); - logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, results.get(0)); - assertThat(results.get(0), equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); + logger.info("timezone: [{}], now [{}], name: [{}]", timeZone, now, result); + assertThat(result, equalTo(".marvel-" + formatDate("uuuu.MM.dd", now.withZoneSameInstant(timeZone)))); } - public void testExpressionInvalidUnescaped() throws Exception { + public void testExpressionInvalidUnescaped() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.mar}vel-{now/d}>")) + () -> DateMathExpressionResolver.resolveExpression("<.mar}vel-{now/d}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("invalid character at position [")); } - public void testExpressionInvalidDateMathFormat() throws Exception { + public void 
testExpressionInvalidDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } - public void testExpressionInvalidEmptyDateMathFormat() throws Exception { + public void testExpressionInvalidEmptyDateMathFormat() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d{}}>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d{}}>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("missing date format")); } - public void testExpressionInvalidOpenEnded() throws Exception { + public void testExpressionInvalidOpenEnded() { Exception e = expectThrows( ElasticsearchParseException.class, - () -> DateMathExpressionResolver.resolve(context, Arrays.asList("<.marvel-{now/d>")) + () -> DateMathExpressionResolver.resolveExpression("<.marvel-{now/d>", getTime) ); assertThat(e.getMessage(), containsString("invalid dynamic name expression")); assertThat(e.getMessage(), containsString("date math placeholder is open ended")); } + static ZonedDateTime dateFromMillis(long millis) { + return ZonedDateTime.ofInstant(Instant.ofEpochMilli(millis), ZoneOffset.UTC); + } + + static String formatDate(String pattern, ZonedDateTime zonedDateTime) { + DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern(pattern, Locale.ROOT); + return dateFormatter.format(zonedDateTime); + } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java index 99470918ce063..30895767c33c2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolverTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Predicates; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -47,6 +48,7 @@ import java.time.LocalDate; import java.time.ZoneOffset; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; @@ -58,6 +60,8 @@ import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createFailureStore; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.dateFromMillis; +import static org.elasticsearch.cluster.metadata.DateMathExpressionResolverTests.formatDate; import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_HIDDEN_SETTING; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static 
org.elasticsearch.indices.SystemIndices.EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY; @@ -885,10 +889,7 @@ public void testConcreteIndicesIgnoreIndicesEmptyRequest() { IndicesOptions.lenientExpandOpen(), SystemIndexAccessLevel.NONE ); - assertThat( - newHashSet(indexNameExpressionResolver.concreteIndexNames(context, new String[] {})), - equalTo(newHashSet("kuku", "testXXX")) - ); + assertThat(newHashSet(indexNameExpressionResolver.concreteIndexNames(context)), equalTo(newHashSet("kuku", "testXXX"))); } public void testConcreteIndicesNoIndicesErrorMessage() { @@ -1408,52 +1409,56 @@ public void testConcreteIndicesWildcardNoMatch() { } } - public void testIsAllIndicesNull() throws Exception { + public void testIsAllIndicesNull() { assertThat(IndexNameExpressionResolver.isAllIndices(null), equalTo(true)); } - public void testIsAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Collections.emptyList()), equalTo(true)); + public void testIsAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of()), equalTo(true)); + } + + public void testIsAllIndicesExplicitAll() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all")), equalTo(true)); } - public void testIsAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all")), equalTo(true)); + public void testIsAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("_all", "other")), equalTo(false)); } - public void testIsAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("_all", "other")), equalTo(false)); + public void testIsNoneIndices() { + assertThat(IndexNameExpressionResolver.isNoneExpression(new String[] { "*", "-*" }), equalTo(true)); } - public void testIsAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isAllIndices(Arrays.asList("*")), equalTo(false)); + public void testIsAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isAllIndices(List.of("*")), equalTo(false)); } - public void testIsExplicitAllIndicesNull() throws Exception { + public void testIsExplicitAllIndicesNull() { assertThat(IndexNameExpressionResolver.isExplicitAllPattern(null), equalTo(false)); } - public void testIsExplicitAllIndicesEmpty() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Collections.emptyList()), equalTo(false)); + public void testIsExplicitAllIndicesEmpty() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of()), equalTo(false)); } - public void testIsExplicitAllIndicesExplicitAll() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all")), equalTo(true)); + public void testIsExplicitAllIndicesExplicitAll() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all")), equalTo(true)); } - public void testIsExplicitAllIndicesExplicitAllPlusOther() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("_all", "other")), equalTo(false)); + public void 
testIsExplicitAllIndicesExplicitAllPlusOther() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("_all", "other")), equalTo(false)); } - public void testIsExplicitAllIndicesNormalIndexes() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("index1", "index2", "index3")), equalTo(false)); + public void testIsExplicitAllIndicesNormalIndexes() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("index1", "index2", "index3")), equalTo(false)); } - public void testIsExplicitAllIndicesWildcard() throws Exception { - assertThat(IndexNameExpressionResolver.isExplicitAllPattern(Arrays.asList("*")), equalTo(false)); + public void testIsExplicitAllIndicesWildcard() { + assertThat(IndexNameExpressionResolver.isExplicitAllPattern(List.of("*")), equalTo(false)); } public void testIndexOptionsFailClosedIndicesAndAliases() { @@ -1580,16 +1585,13 @@ public void testResolveExpressions() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - assertEquals(new HashSet<>(Arrays.asList("alias-0", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "alias-*")); + assertEquals(Set.of("test-0", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*")); assertEquals( - new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")), - indexNameExpressionResolver.resolveExpressions(state, "test-0", "alias-*") - ); - assertEquals( - new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")), + Set.of("test-0", "test-1", "alias-0", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "test-*", "alias-*") ); - assertEquals(new HashSet<>(Arrays.asList("test-1", "alias-1")), indexNameExpressionResolver.resolveExpressions(state, "*-1")); + assertEquals(Set.of("test-1", "alias-1"), indexNameExpressionResolver.resolveExpressions(state, "*-1")); } public void testFilteringAliases() { @@ -1598,16 +1600,16 @@ public void testFilteringAliases() { .put(indexBuilder("test-1").state(State.OPEN).putAlias(AliasMetadata.builder("alias-1"))); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new HashSet<>(Arrays.asList("alias-0", "alias-1")); + Set resolvedExpressions = Set.of("alias-0", "alias-1"); String[] strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertArrayEquals(new String[] { "alias-0" }, strings); // concrete index supersedes filtering alias - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); - resolvedExpressions = new HashSet<>(Arrays.asList("test-0", "test-1", "alias-0", "alias-1")); + resolvedExpressions = Set.of("test-0", "test-1", "alias-0", "alias-1"); strings = indexNameExpressionResolver.filteringAliases(state, "test-0", resolvedExpressions); assertNull(strings); } @@ -1742,7 +1744,7 @@ public void testIndexAliasesSkipIdentity() { ); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - Set resolvedExpressions = new 
HashSet<>(Arrays.asList("test-0", "test-alias")); + Set resolvedExpressions = Set.of("test-0", "test-alias"); String[] aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, false, resolvedExpressions); assertNull(aliases); aliases = indexNameExpressionResolver.indexAliases(state, "test-0", x -> true, x -> true, true, resolvedExpressions); @@ -1769,7 +1771,7 @@ public void testConcreteWriteIndexSuccessful() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1851,7 +1853,7 @@ public void testConcreteWriteIndexWithWildcardExpansion() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1889,7 +1891,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithSingleIndex() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-alias")) + Set.of("test-0", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1925,7 +1927,7 @@ public void testConcreteWriteIndexWithNoWriteIndexWithMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -1966,7 +1968,7 @@ public void testAliasResolutionNotAllowingMultipleIndices() { x -> true, x -> true, true, - new HashSet<>(Arrays.asList("test-0", "test-1", "test-alias")) + Set.of("test-0", "test-1", "test-alias") ); Arrays.sort(strings); assertArrayEquals(new String[] { "test-alias" }, strings); @@ -2328,40 +2330,40 @@ public void testFullWildcardSystemIndexResolutionWithExpandHiddenAllowed() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); } public void testWildcardSystemIndexResolutionMultipleMatchesAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); } public void testWildcardSystemIndexResolutionSingleMatchAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); } public void testSingleSystemIndexResolutionAllowed() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - 
List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta")); } public void testFullWildcardSystemIndicesAreHidden() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(randomFrom("*", "_all")); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining("some-other-index")); } public void testFullWildcardSystemIndexResolutionDeprecated() { @@ -2370,8 +2372,8 @@ public void testFullWildcardSystemIndexResolutionDeprecated() { SearchRequest request = new SearchRequest(randomFrom("*", "_all")); request.indicesOptions(IndicesOptions.strictExpandHidden()); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder("some-other-index", ".ml-stuff", ".ml-meta", ".watches")); assertWarnings( true, new DeprecationWarning( @@ -2388,8 +2390,8 @@ public void testSingleSystemIndexResolutionDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-meta"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".ml-meta")); assertWarnings( true, new DeprecationWarning( @@ -2405,8 +2407,8 @@ public void testWildcardSystemIndexResolutionSingleMatchDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".w*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".watches")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".watches")); assertWarnings( true, new DeprecationWarning( @@ -2423,8 +2425,8 @@ public void testWildcardSystemIndexResolutionMultipleMatchesDeprecated() { ClusterState state = systemIndexTestClusterState(); SearchRequest request = new SearchRequest(".ml-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, containsInAnyOrder(".ml-meta", ".ml-stuff")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContainingInAnyOrder(".ml-meta", ".ml-stuff")); assertWarnings( true, new DeprecationWarning( @@ -2479,8 +2481,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2496,8 +2498,8 
@@ public void testExternalSystemIndexAccess() { threadContext.putHeader(SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, Boolean.FALSE.toString()); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings( true, new DeprecationWarning( @@ -2515,8 +2517,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2526,8 +2528,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "stack-component"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2538,8 +2540,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-*"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -2549,8 +2551,8 @@ public void testExternalSystemIndexAccess() { threadContext.putHeader(EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY, "other"); SearchRequest request = new SearchRequest(".external-sys-idx"); - List indexNames = resolveConcreteIndexNameList(state, request); - assertThat(indexNames, contains(".external-sys-idx")); + String[] indexNames = indexNameExpressionResolver.concreteIndexNames(state, request); + assertThat(indexNames, arrayContaining(".external-sys-idx")); assertWarnings(); } } @@ -3073,7 +3075,6 @@ public void testDataStreamsWithWildcardExpression() { assertThat(result[1].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream1, 2, epochMillis))); assertThat(result[2].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 1, epochMillis))); assertThat(result[3].getName(), equalTo(DataStream.getDefaultBackingIndexName(dataStream2, 2, epochMillis))); - ; } { IndicesOptions indicesOptions = IndicesOptions.STRICT_EXPAND_OPEN; @@ -3239,6 +3240,37 @@ public void testDataStreamsNames() { assertThat(names, empty()); } + public void testDateMathMixedArray() { + long now = System.currentTimeMillis(); + String dateMathIndex1 = ".marvel-" + formatDate("uuuu.MM.dd", dateFromMillis(now)); + String dateMathIndex2 = ".logstash-" + formatDate("uuuu.MM", dateFromMillis(now).withDayOfMonth(1)); + IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( + ClusterState.builder(new 
ClusterName("_name")) + .metadata( + Metadata.builder() + .put(indexBuilder("name1")) + .put(indexBuilder("name2")) + .put(indexBuilder(dataMathIndex1)) + .put(indexBuilder(dateMathIndex2)) + ) + .build(), + IndicesOptions.strictExpand(), + now, + SystemIndexAccessLevel.NONE, + Predicates.never(), + Predicates.never() + ); + Collection result = IndexNameExpressionResolver.resolveExpressionsToResources( + context, + "name1", + "<.marvel-{now/d}>", + "name2", + "<.logstash-{now/M{uuuu.MM}}>" + ); + assertThat(result.size(), equalTo(4)); + assertThat(result, contains("name1", dataMathIndex1, "name2", dateMathIndex2)); + } + public void testMathExpressionSupport() { Instant instant = LocalDate.of(2021, 01, 11).atStartOfDay().toInstant(ZoneOffset.UTC); String resolved = IndexNameExpressionResolver.resolveDateMathExpression("", instant.toEpochMilli()); @@ -3418,10 +3450,6 @@ private ClusterState systemIndexTestClusterState() { return ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); } - private List resolveConcreteIndexNameList(ClusterState state, SearchRequest request) { - return Arrays.stream(indexNameExpressionResolver.concreteIndices(state, request)).map(Index::getName).toList(); - } - private static IndexMetadata.Builder indexBuilder(String index, Settings additionalSettings) { return IndexMetadata.builder(index).settings(indexSettings(IndexVersion.current(), 1, 0).put(additionalSettings)); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java index 982394ca31b1c..6a26e7948784c 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/WildcardExpressionResolverTests.java @@ -13,23 +13,20 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata.State; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel; import org.elasticsearch.test.ESTestCase; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Predicate; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.common.util.set.Sets.newHashSet; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; public class WildcardExpressionResolverTests extends ESTestCase { @@ -50,107 +47,31 @@ public void testConvertWildcardsJustIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testXXX"))), - equalTo(newHashSet("testXXX")) + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "ku*")), + equalTo(newHashSet("kuku")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "testYYY"))), - equalTo(newHashSet("testXXX", "testYYY")) - ); - assertThat( - 
newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "ku*"))), - equalTo(newHashSet("testXXX", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*")), equalTo(newHashSet("testXXX", "testXYY", "testYYY", "kuku")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("*", "-kuku"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet( - IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - context, - Arrays.asList("testX*", "-doe", "-testXXX", "-testYYY") - ) - ), - equalTo(newHashSet("testXYY")) - ); - if (indicesOptions == IndicesOptions.lenientExpandOpen()) { - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testXXX"))), - equalTo(newHashSet("testXXX", "-testXXX")) - ); - } else if (indicesOptions == IndicesOptions.strictExpandOpen()) { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(context, "testXXX", "-testXXX") - ); - assertEquals("-testXXX", infe.getIndex().getName()); - } - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testXXX", "-testX*"))), - equalTo(newHashSet("testXXX")) - ); - } - - public void testConvertWildcardsTests() { - Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").putAlias(AliasMetadata.builder("alias1")).putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testXYY").putAlias(AliasMetadata.builder("alias2"))) - .put(indexBuilder("testYYY").putAlias(AliasMetadata.builder("alias3"))) - .put(indexBuilder("kuku")); - ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); - - IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.lenientExpandOpen(), - SystemIndexAccessLevel.NONE - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYY*", "alias*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("-kuku"))), - equalTo(newHashSet("-kuku")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("test*", 
"-testYYY"))), - equalTo(newHashSet("testXXX", "testXYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testX*", "testYYY"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Arrays.asList("testYYY", "testX*"))), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); } public void testConvertWildcardsOpenClosedIndicesTests() { Metadata.Builder mdBuilder = Metadata.builder() - .put(indexBuilder("testXXX").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXXY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testXYY").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("testYYY").state(IndexMetadata.State.OPEN)) - .put(indexBuilder("testYYX").state(IndexMetadata.State.CLOSE)) - .put(indexBuilder("kuku").state(IndexMetadata.State.OPEN)); + .put(indexBuilder("testXXX").state(State.OPEN)) + .put(indexBuilder("testXXY").state(State.OPEN)) + .put(indexBuilder("testXYY").state(State.CLOSE)) + .put(indexBuilder("testYYY").state(State.OPEN)) + .put(indexBuilder("testYYX").state(State.CLOSE)) + .put(indexBuilder("kuku").state(State.OPEN)); ClusterState state = ClusterState.builder(new ClusterName("_name")).metadata(mdBuilder).build(); IndexNameExpressionResolver.Context context = new IndexNameExpressionResolver.Context( @@ -159,7 +80,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -168,7 +89,7 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXYY")) ); context = new IndexNameExpressionResolver.Context( @@ -177,26 +98,9 @@ public void testConvertWildcardsOpenClosedIndicesTests() { SystemIndexAccessLevel.NONE ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("testX*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "testX*")), equalTo(newHashSet("testXXX", "testXXY")) ); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(true, true, false, false), - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(context, "testX*").size(), equalTo(0)); - context = new IndexNameExpressionResolver.Context( - state, - IndicesOptions.fromOptions(false, true, false, false), - SystemIndexAccessLevel.NONE - ); - IndexNameExpressionResolver.Context finalContext = context; - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.resolveExpressions(finalContext, "testX*") - ); - assertThat(infe.getIndex().getName(), is("testX*")); } // issue #13334 @@ -217,28 +121,27 @@ public void testMultipleWildcards() { SystemIndexAccessLevel.NONE ); 
assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*")), equalTo(newHashSet("testXXX", "testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*X*Y"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*X*Y")), equalTo(newHashSet("testXXY", "testXYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("kuku*Y*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "kuku*Y*")), equalTo(newHashSet("kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*"))), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*")), equalTo(newHashSet("testXXY", "testXYY", "testYYY", "kukuYYY")) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("test*Y*X"))) - .size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "test*Y*X")).size(), equalTo(0) ); assertThat( - newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, Collections.singletonList("*Y*X"))).size(), + newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, "*Y*X")).size(), equalTo(0) ); } @@ -259,26 +162,6 @@ public void testAll() { newHashSet(IndexNameExpressionResolver.WildcardExpressionResolver.resolveAll(context)), equalTo(newHashSet("testXXX", "testXYY", "testYYY")) ); - assertThat( - newHashSet(IndexNameExpressionResolver.resolveExpressions(context, "_all")), - equalTo(newHashSet("testXXX", "testXYY", "testYYY")) - ); - IndicesOptions noExpandOptions = IndicesOptions.fromOptions( - randomBoolean(), - true, - false, - false, - randomBoolean(), - randomBoolean(), - randomBoolean(), - randomBoolean() - ); - IndexNameExpressionResolver.Context noExpandContext = new IndexNameExpressionResolver.Context( - state, - noExpandOptions, - SystemIndexAccessLevel.NONE - ); - assertThat(IndexNameExpressionResolver.resolveExpressions(noExpandContext, "_all").size(), equalTo(0)); } public void testAllAliases() { @@ -506,112 +389,47 @@ public void testResolveAliases() { ); { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertThat(indices, containsInAnyOrder("foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo_a*") + "foo_a*" ); assertEquals(0, indices.size()); } { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesStrictContext, - Collections.singletonList("foo_a*") - ) + Set indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + skipAliasesStrictContext, + "foo_a*" ); - assertEquals("foo_a*", infe.getIndex().getName()); + assertThat(indices, empty()); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesLenientContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( skipAliasesStrictContext, - Collections.singletonList("foo*") + "foo*" ); assertThat(indices, containsInAnyOrder("foo_foo", "foo_index")); } - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - skipAliasesLenientContext, - Collections.singletonList("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(skipAliasesStrictContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } - IndicesOptions noExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions(true, false, false, false, true, false, true, false); - IndexNameExpressionResolver.Context noExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - noExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - noExpandNoAliasesContext, - List.of("foo_alias") - ); - assertThat(indices, containsInAnyOrder("foo_alias")); - } - IndicesOptions strictNoExpandNoAliasesIndicesOptions = IndicesOptions.fromOptions( - false, - true, - false, - false, - true, - false, - true, - false - ); - IndexNameExpressionResolver.Context strictNoExpandNoAliasesContext = new IndexNameExpressionResolver.Context( - state, - strictNoExpandNoAliasesIndicesOptions, - SystemIndexAccessLevel.NONE - ); - { - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(strictNoExpandNoAliasesContext, "foo_alias") - ); - assertEquals( - "The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead.", - iae.getMessage() - ); - } } public void testResolveDataStreams() { @@ -654,17 +472,14 @@ public void testResolveDataStreams() { ); // data streams are not included but expression matches the data stream - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAndAliasesContext, - Collections.singletonList("foo_*") + "foo_*" ); assertThat(indices, containsInAnyOrder("foo_index", "foo_foo", "bar_index")); // data streams are not included and expression doesn't match the data stream - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( - indicesAndAliasesContext, - Collections.singletonList("bar_*") - ); + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "bar_*"); assertThat(indices, containsInAnyOrder("bar_bar", "bar_index")); } @@ -691,9 +506,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -707,9 +522,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesAndDataStreamsContext, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -748,9 +563,9 @@ public void testResolveDataStreams() { ); // data stream's corresponding backing indices are resolved - Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + Collection indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("foo_*") + "foo_*" ); assertThat( indices, @@ -764,9 +579,9 @@ public void testResolveDataStreams() { ); // include all wildcard adds the data stream's backing indices - indices = IndexNameExpressionResolver.WildcardExpressionResolver.resolve( + indices = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( indicesAliasesDataStreamsAndHiddenIndices, - Collections.singletonList("*") + "*" ); assertThat( indices, @@ -808,24 +623,17 @@ public void testMatchesConcreteIndicesWildcardAndAliases() { SystemIndexAccessLevel.NONE ); - Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("*")); + Collection matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources( + indicesAndAliasesContext, + "*" + ); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "*"); assertThat(matches, containsInAnyOrder("bar_bar", "foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo*")); + matches = IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(indicesAndAliasesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index", "bar_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(onlyIndicesContext, List.of("foo*")); + matches = 
IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(onlyIndicesContext, "foo*"); assertThat(matches, containsInAnyOrder("foo_foo", "foo_index")); - matches = IndexNameExpressionResolver.WildcardExpressionResolver.resolve(indicesAndAliasesContext, List.of("foo_alias")); - assertThat(matches, containsInAnyOrder("foo_alias")); - IllegalArgumentException iae = expectThrows( - IllegalArgumentException.class, - () -> IndexNameExpressionResolver.resolveExpressions(onlyIndicesContext, "foo_alias") - ); - assertThat( - iae.getMessage(), - containsString("The provided expression [foo_alias] matches an alias, specify the corresponding concrete indices instead") - ); } private static IndexMetadata.Builder indexBuilder(String index, boolean hidden) { @@ -838,10 +646,6 @@ private static IndexMetadata.Builder indexBuilder(String index) { } private static void assertWildcardResolvesToEmpty(IndexNameExpressionResolver.Context context, String wildcardExpression) { - IndexNotFoundException infe = expectThrows( - IndexNotFoundException.class, - () -> IndexNameExpressionResolver.WildcardExpressionResolver.resolve(context, List.of(wildcardExpression)) - ); - assertEquals(wildcardExpression, infe.getIndex().getName()); + assertThat(IndexNameExpressionResolver.WildcardExpressionResolver.matchWildcardToResources(context, wildcardExpression), empty()); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java index cd44aaafbfae2..05eb7551330b2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java @@ -130,11 +130,11 @@ public boolean equals(Object obj) { * still result in unique snapshot names. 
*/ public static String generateSnapshotName(String name) { - return generateSnapshotName(name, new IndexNameExpressionResolver.ResolverContext()); + return generateSnapshotName(name, System.currentTimeMillis()); } - public static String generateSnapshotName(String name, IndexNameExpressionResolver.Context context) { - String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, context.getStartTime()); + public static String generateSnapshotName(String name, long now) { + String candidate = IndexNameExpressionResolver.resolveDateMathExpression(name, now); // TODO: we are breaking the rules of UUIDs by lowercasing this here, find an alternative (snapshot names must be lowercase) return candidate + "-" + UUIDs.randomBase64UUID().toLowerCase(Locale.ROOT); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java index ce8cd5ae46ace..bee6351582bc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStepTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.LifecycleExecutionState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.RepositoriesMetadata; @@ -185,13 +184,12 @@ public void testNameGeneration() { assertThat(generateSnapshotName("name"), startsWith("name-")); assertThat(generateSnapshotName("name").length(), greaterThan("name-".length())); - IndexNameExpressionResolver.ResolverContext resolverContext = new IndexNameExpressionResolver.ResolverContext(time); - assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019.03.15-")); - assertThat(generateSnapshotName("", resolverContext).length(), greaterThan("name-2019.03.15-".length())); + assertThat(generateSnapshotName("", time), startsWith("name-2019.03.15-")); + assertThat(generateSnapshotName("", time).length(), greaterThan("name-2019.03.15-".length())); - assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019.03.01-")); + assertThat(generateSnapshotName("", time), startsWith("name-2019.03.01-")); - assertThat(generateSnapshotName("", resolverContext), startsWith("name-2019-03-15.21:09:00-")); + assertThat(generateSnapshotName("", time), startsWith("name-2019-03-15.21:09:00-")); } public void testNameValidation() { From 06840ba54dc741debd6f112bb417ff62b0136540 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 21 Nov 2024 09:02:40 -0500 Subject: [PATCH 02/50] ESQL: Remove historical features (#116966) Our friends working on cluster features are looking to remove the infrastructure for historical features. In `main` all historical features are always enabled because historical features weren't allowed to support versions before 8.a_long_time_ago. All of ours are certainly enabled for versions `main` is wire compatible with. 
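Concretely, most of this change deletes the `required_capability` gates for historical features from the csv-spec tests. For example, the `in` case in boolean.csv-spec (the first hunk below) drops its `mv_warn` gate; the resulting test is simply:

    in
    from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired;
    ignoreOrder:true

Gates on capabilities that can still vary between versions (for example `enrich_load`) are left in place.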
--- .../src/main/resources/boolean.csv-spec | 1 - .../src/main/resources/date.csv-spec | 1 - .../src/main/resources/enrich.csv-spec | 1 - .../src/main/resources/floats.csv-spec | 8 ---- .../src/main/resources/ints.csv-spec | 16 ------- .../src/main/resources/ip.csv-spec | 13 ------ .../src/main/resources/math.csv-spec | 20 --------- .../src/main/resources/spatial.csv-spec | 1 - .../src/main/resources/string.csv-spec | 14 ------ .../src/main/resources/unsigned_long.csv-spec | 8 ---- .../xpack/esql/action/EsqlCapabilities.java | 6 +++ .../xpack/esql/plugin/EsqlFeatures.java | 43 ------------------- .../esql/plugin/TransportEsqlStatsAction.java | 11 +---- .../elasticsearch/xpack/esql/CsvTests.java | 5 ++- 14 files changed, 11 insertions(+), 137 deletions(-) diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec index adbf24cee10b0..1e23cf62917fc 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/boolean.csv-spec @@ -63,7 +63,6 @@ avg(salary):double | always_false:boolean in -required_capability: mv_warn from employees | keep emp_no, is_rehired, still_hired | where is_rehired in (still_hired, true) | where is_rehired != still_hired; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index 7e7c561fac3a5..734e2ef5e475e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -216,7 +216,6 @@ string:keyword |datetime:date ; convertFromUnsignedLong -required_capability: convert_warn row ul = [9223372036854775808, 520128000000] | eval dt = to_datetime(ul); warningRegex:Line 1:58: evaluation of \[to_datetime\(ul\)\] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec index 3c38bd190b0b1..25b114b5d1daf 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/enrich.csv-spec @@ -580,7 +580,6 @@ CPH | Copenhagen | POINT(12.5683 55.6761) | Denmark spatialEnrichmentGeoMatchStats#[skip:-8.13.99, reason:ENRICH extended in 8.14.0] required_capability: enrich_load -required_capability: mv_warn FROM airports | ENRICH city_boundaries ON city_location WITH airport, region, city_boundary diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec index 537b69547c6be..3505b52e5599e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/floats.csv-spec @@ -99,7 +99,6 @@ int:integer |dbl:double ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change < 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change < 1\] failed, treating result as null. Only first 20 failures recorded. 
@@ -115,7 +114,6 @@ emp_no:integer |salary_change:double ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change > 1 | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -131,7 +129,6 @@ emp_no:integer |salary_change:double ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change == 1.19] failed, treating result as null. Only first 20 failures recorded. @@ -143,7 +140,6 @@ emp_no:integer |salary_change:double ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change == 1.19 or salary_change == 7.58 | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change] failed, treating result as null. Only first 20 failures recorded. @@ -156,7 +152,6 @@ emp_no:integer |salary_change:double ; inMultivalue -required_capability: mv_warn from employees | where salary_change in (1.19, 7.58) | keep emp_no, salary_change | sort emp_no; warning:Line 1:24: evaluation of [salary_change in (1.19, 7.58)] failed, treating result as null. Only first 20 failures recorded. @@ -169,7 +164,6 @@ emp_no:integer |salary_change:double ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change < 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -185,7 +179,6 @@ emp_no:integer |salary_change:double ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change > 1) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change > 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -201,7 +194,6 @@ emp_no:integer |salary_change:double ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change == 1.19) | keep emp_no, salary_change | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change == 1.19.*\] failed, treating result as null. Only first 20 failures recorded. diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index b399734151412..f4b6d41a7a027 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -1,7 +1,6 @@ // Integral types-specific tests inLongAndInt -required_capability: mv_warn from employees | where avg_worked_seconds in (372957040, salary_change.long, 236703986) | where emp_no in (10017, emp_no - 1) | keep emp_no, avg_worked_seconds; warningRegex:evaluation of \[avg_worked_seconds in \(372957040, salary_change.long, 236703986\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ long:long |ul:ul ; convertDoubleToUL -required_capability: convert_warn row d = 123.4 | eval ul = to_ul(d), overflow = to_ul(1e20); warningRegex:Line 1:48: evaluation of \[to_ul\(1e20\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -127,7 +125,6 @@ int:integer |long:long ; convertULToLong -required_capability: convert_warn row ul = [9223372036854775807, 9223372036854775808] | eval long = to_long(ul); warningRegex:Line 1:67: evaluation of \[to_long\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -170,7 +167,6 @@ str1:keyword |str2:keyword |str3:keyword |long1:long |long2:long |long3:long ; convertDoubleToLong -required_capability: convert_warn row d = 123.4 | eval d2l = to_long(d), overflow = to_long(1e19); warningRegex:Line 1:51: evaluation of \[to_long\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -190,7 +186,6 @@ int:integer |ii:integer ; convertLongToInt -required_capability: convert_warn // tag::to_int-long[] ROW long = [5013792, 2147483647, 501379200000] @@ -207,7 +202,6 @@ long:long |int:integer ; convertULToInt -required_capability: convert_warn row ul = [2147483647, 9223372036854775808] | eval int = to_int(ul); warningRegex:Line 1:57: evaluation of \[to_int\(ul\)\] failed, treating result as null. Only first 20 failures recorded. @@ -239,7 +233,6 @@ int_str:keyword |int_dbl_str:keyword |is2i:integer|ids2i:integer ; convertStringToIntFail#[skip:-8.13.99, reason:warning changed in 8.14] -required_capability: mv_warn row str1 = "2147483647.2", str2 = "2147483648", non = "no number" | eval i1 = to_integer(str1), i2 = to_integer(str2), noi = to_integer(non); warningRegex:Line 1:79: evaluation of \[to_integer\(str1\)\] failed, treating result as null. Only first 20 failures recorded. @@ -254,7 +247,6 @@ str1:keyword |str2:keyword |non:keyword |i1:integer |i2:integer | ; convertDoubleToInt -required_capability: convert_warn row d = 123.4 | eval d2i = to_integer(d), overflow = to_integer(1e19); warningRegex:Line 1:54: evaluation of \[to_integer\(1e19\)\] failed, treating result as null. Only first 20 failures recorded. @@ -265,7 +257,6 @@ d:double |d2i:integer |overflow:integer ; lessThanMultivalue -required_capability: mv_warn from employees | where salary_change.int < 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int < 1\] failed, treating result as null. Only first 20 failures recorded. @@ -281,7 +272,6 @@ emp_no:integer |salary_change.int:integer ; greaterThanMultivalue -required_capability: mv_warn from employees | where salary_change.int > 1 | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[salary_change.int > 1\] failed, treating result as null. Only first 20 failures recorded. @@ -297,7 +287,6 @@ emp_no:integer |salary_change.int:integer ; equalToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 0 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int == 0\] failed, treating result as null. Only first 20 failures recorded. @@ -312,7 +301,6 @@ emp_no:integer |salary_change.int:integer ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where salary_change.int == 1 or salary_change.int == 8 | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int\] failed, treating result as null. Only first 20 failures recorded. @@ -325,7 +313,6 @@ emp_no:integer |salary_change.int:integer ; inMultivalue -required_capability: mv_warn from employees | where salary_change.int in (1, 7) | keep emp_no, salary_change.int | sort emp_no; warningRegex:evaluation of \[salary_change.int in \(1, 7\)\] failed, treating result as null. 
Only first 20 failures recorded. @@ -338,7 +325,6 @@ emp_no:integer |salary_change.int:integer ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int < 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int < 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -354,7 +340,6 @@ emp_no:integer |salary_change.int:integer ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(salary_change.int > 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int > 1.*\] failed, treating result as null. Only first 20 failures recorded. @@ -370,7 +355,6 @@ emp_no:integer |salary_change.int:integer ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(salary_change.int == 1) | keep emp_no, salary_change.int | sort emp_no | limit 5; warningRegex:evaluation of \[.*salary_change.int == 1.*\] failed, treating result as null. Only first 20 failures recorded diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 0fb6994ef759f..4418f7e0aa7ed 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -16,7 +16,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; equals -required_capability: mv_warn from hosts | sort host, card | where ip0 == ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 == ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -60,7 +59,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; lessThan -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 < ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 < ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -73,7 +71,6 @@ lo0 |gamma |fe80::cae2:65ff:fece:feb9|fe81::cae2:65ff:fece:f ; notEquals -required_capability: mv_warn from hosts | sort host, card, ip1 | where ip0 != ip1 | keep card, host, ip0, ip1; warningRegex:evaluation of \[ip0 != ip1\] failed, treating result as null. Only first 20 failures recorded. @@ -125,7 +122,6 @@ null |[127.0.0.1, 127.0.0.2, 127.0.0.3] ; conditional -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | keep eq, ip0, ip1; ignoreOrder:true @@ -146,7 +142,6 @@ fe80::cae2:65ff:fece:fec1 |[fe80::cae2:65ff:fece:feb ; in -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -168,7 +163,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece inWithWarningsRegex#[skip:-8.13.99, reason:regex warnings in tests introduced in v 8.14.0] -required_capability: mv_warn from hosts | eval eq=case(ip0==ip1, ip0, ip1) | where eq in (ip0, ip1) | keep card, host, ip0, ip1, eq; ignoreOrder:true @@ -188,7 +182,6 @@ eth0 |epsilon |[fe80::cae2:65ff:fece:feb9, fe80::cae2:65ff:fece ; cidrMatchSimple -required_capability: mv_warn from hosts | where cidr_match(ip1, "127.0.0.2/32") | keep card, host, ip0, ip1; warningRegex:evaluation of \[cidr_match\(ip1, \\\"127.0.0.2/32\\\"\)\] failed, treating result as null. Only first 20 failures recorded. 
@@ -199,7 +192,6 @@ eth1 |beta |127.0.0.1 |127.0.0.2 ; cidrMatchNullField -required_capability: mv_warn from hosts | where cidr_match(ip0, "127.0.0.2/32") is null | keep card, host, ip0, ip1; ignoreOrder:true @@ -213,7 +205,6 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs -required_capability: mv_warn //tag::cdirMatchMultipleArgs[] FROM hosts @@ -233,7 +224,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFunctionArg -required_capability: mv_warn from hosts | where cidr_match(ip1, concat("127.0.0.2", "/32"), "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -246,7 +236,6 @@ eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 ; cidrMatchFieldArg -required_capability: mv_warn from hosts | eval cidr="127.0.0.2" | where cidr_match(ip1, cidr, "127.0.0.3/32") | keep card, host, ip0, ip1; ignoreOrder:true @@ -366,7 +355,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithIn -required_capability: mv_warn from hosts | where ip1 in (to_ip("::1"), to_ip("127.0.0.1")) | keep card, host, ip0, ip1; ignoreOrder:true @@ -380,7 +368,6 @@ eth0 |beta |127.0.0.1 |::1 ; pushDownIPWithComparision -required_capability: mv_warn from hosts | where ip1 > to_ip("127.0.0.1") | keep card, ip1; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index da069836504d4..2fe2feb3bc219 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -214,8 +214,6 @@ height:double | s:double ; powSalarySquared -required_capability: pow_double - from employees | eval s = pow(to_long(salary) - 75000, 2) + 10000 | keep salary, s | sort salary desc | limit 4; salary:integer | s:double @@ -631,8 +629,6 @@ base:double | exponent:integer | result:double ; powIntInt -required_capability: pow_double - ROW base = 2, exponent = 2 | EVAL s = POW(base, exponent) ; @@ -642,8 +638,6 @@ base:integer | exponent:integer | s:double ; powIntIntPlusInt -required_capability: pow_double - row s = 1 + pow(2, 2); s:double @@ -658,8 +652,6 @@ s:double ; powIntUL -required_capability: pow_double - row x = pow(1, 9223372036854775808); x:double @@ -667,8 +659,6 @@ x:double ; powLongUL -required_capability: pow_double - row x = to_long(1) | eval x = pow(x, 9223372036854775808); x:double @@ -676,8 +666,6 @@ x:double ; powUnsignedLongUL -required_capability: pow_double - row x = to_ul(1) | eval x = pow(x, 9223372036854775808); x:double @@ -701,8 +689,6 @@ null ; powULInt -required_capability: pow_double - row x = pow(to_unsigned_long(9223372036854775807), 1); x:double @@ -710,8 +696,6 @@ x:double ; powULIntOverrun -required_capability: pow_double - ROW x = POW(9223372036854775808, 2) ; @@ -732,8 +716,6 @@ x:double ; powULLong -required_capability: pow_double - row x = to_long(10) | eval x = pow(to_unsigned_long(10), x); x:double @@ -741,8 +723,6 @@ x:double ; powULLongOverrun -required_capability: pow_double - row x = to_long(100) | eval x = pow(to_unsigned_long(10), x); x:double diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 01e7258e8a6ee..ac9948c90f5e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -3,7 +3,6 @@ 
############################################### convertFromStringQuantize -required_capability: spatial_points row wkt = "POINT(42.97109629958868 14.7552534006536)" | eval pt = to_geopoint(wkt); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec index 963245f9f0ea6..e103168d2e589 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/string.csv-spec @@ -390,7 +390,6 @@ emp_no:integer | name:keyword // Note: no matches in MV returned in -required_capability: mv_warn from employees | where job_positions in ("Internship", first_name) | keep emp_no, job_positions; ignoreOrder:true @@ -582,7 +581,6 @@ emp_no:integer |positions:keyword ; lessThanMultivalue -required_capability: mv_warn from employees | where job_positions < "C" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions < \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -595,7 +593,6 @@ emp_no:integer |job_positions:keyword ; greaterThanMultivalue -required_capability: mv_warn from employees | where job_positions > "C" | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[job_positions > \\\"C\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -612,7 +609,6 @@ emp_no:integer |job_positions:keyword ; equalToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions == \\\"Accountant\\\"\] failed, treating result as null. Only first 20 failures recorded. @@ -624,7 +620,6 @@ emp_no:integer |job_positions:keyword ; equalToOrEqualToMultivalue -required_capability: mv_warn from employees | where job_positions == "Accountant" or job_positions == "Tech Lead" | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions\] failed, treating result as null. Only first 20 failures recorded. @@ -637,7 +632,6 @@ emp_no:integer |job_positions:keyword ; inMultivalue -required_capability: mv_warn from employees | where job_positions in ("Accountant", "Tech Lead") | keep emp_no, job_positions | sort emp_no; warningRegex:evaluation of \[job_positions in \(\\\"Accountant\\\", \\"Tech Lead\\\"\)\] failed, treating result as null. Only first 20 failures recorded. @@ -650,7 +644,6 @@ emp_no:integer |job_positions:keyword ; notLessThanMultivalue -required_capability: mv_warn from employees | where not(job_positions < "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions < \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -667,7 +660,6 @@ emp_no:integer |job_positions:keyword ; notGreaterThanMultivalue -required_capability: mv_warn from employees | where not(job_positions > "C") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions > \\\"C\\\".*\] failed, treating result as null. Only first 20 failures recorded. @@ -680,7 +672,6 @@ emp_no:integer |job_positions:keyword ; notEqualToMultivalue -required_capability: mv_warn from employees | where not(job_positions == "Accountant") | keep emp_no, job_positions | sort emp_no | limit 6; warningRegex:evaluation of \[.*job_positions == \\\"Accountant\\\".*\] failed, treating result as null. Only first 20 failures recorded. 
@@ -937,7 +928,6 @@ beta | Kubernetes cluster | [beta k8s server, beta k8s server2 ; lengthOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = length(host_group), l2 = length(description) | keep l1, l2; ignoreOrder:true @@ -951,7 +941,6 @@ null | 19 ; startsWithText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = starts_with(host_group, host), l2 = starts_with(description, host) | keep l1, l2; ignoreOrder:true @@ -965,7 +954,6 @@ false | null ; substringOfText -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = substring(host_group, 0, 5), l2 = substring(description, 0, 5) | keep l1, l2; ignoreOrder:true @@ -979,7 +967,6 @@ Gatew | null ; concatOfText -required_capability: mv_warn from hosts | where host == "epsilon" | eval l1 = concat(host, "/", host_group), l2 = concat(host_group, "/", description) | sort l1 | keep l1, l2; warning:Line 1:86: evaluation of [concat(host_group, \"/\", description)] failed, treating result as null. Only first 20 failures recorded. @@ -1518,7 +1505,6 @@ min(f_l):integer | max(f_l):integer | job_positions:keyword ; locateWarnings#[skip:-8.13.99,reason:new string function added in 8.14] -required_capability: mv_warn from hosts | where host=="epsilon" | eval l1 = locate(host_group, "ate"), l2 = locate(description, "ate") | keep l1, l2; ignoreOrder:true diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec index 03d0b71894d9b..fbddb3d0e6989 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/unsigned_long.csv-spec @@ -46,7 +46,6 @@ from ul_logs | sort bytes_in desc nulls last, id | limit 12; ; filterPushDownGT -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in >= to_ul\(74330435873664882\)\] failed, treating result as null. Only first 20 failures recorded. @@ -68,7 +67,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownRange -required_capability: mv_warn from ul_logs | where bytes_in >= to_ul(74330435873664882) | where bytes_in <= to_ul(316080452389500167) | sort bytes_in | eval div = bytes_in / to_ul(pow(10., 15)) | keep bytes_in, div, id | limit 12; warningRegex:evaluation of \[bytes_in .* to_ul\(.*\)\] failed, treating result as null. Only first 20 failures recorded. @@ -82,7 +80,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterPushDownIn -required_capability: mv_warn // TODO: testing framework doesn't perform implicit conversion to UL of given values, needs explicit conversion from ul_logs | where bytes_in in (to_ul(74330435873664882), to_ul(154551962150890564), to_ul(195161570976258241)) | sort bytes_in | keep bytes_in, id; @@ -96,7 +93,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsEquality -required_capability: mv_warn from ul_logs | where bytes_in == bytes_out; warningRegex:evaluation of \[bytes_in == bytes_out\] failed, treating result as null. Only first 20 failures recorded. 
@@ -107,7 +103,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; filterOnFieldsInequality -required_capability: mv_warn from ul_logs | sort id | where bytes_in < bytes_out | eval b_in = bytes_in / to_ul(pow(10.,15)), b_out = bytes_out / to_ul(pow(10.,15)) | limit 5; warningRegex:evaluation of \[bytes_in < bytes_out\] failed, treating result as null. Only first 20 failures recorded. @@ -138,7 +133,6 @@ from ul_logs | stats c = count(bytes_in) by bytes_in | sort c desc, bytes_in des ; case -required_capability: mv_warn from ul_logs | where case(bytes_in == to_ul(154551962150890564), true, false); warningRegex:evaluation of \[bytes_in == to_ul\(154551962150890564\)\] failed, treating result as null. Only first 20 failures recorded. @@ -149,7 +143,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toDegrees -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL deg = TO_DEGREES(bytes_in) | KEEP bytes_in, deg ; @@ -161,7 +154,6 @@ warningRegex:java.lang.IllegalArgumentException: single-value function encounter ; toRadians -required_capability: mv_warn FROM ul_logs | WHERE bytes_in == bytes_out | EVAL rad = TO_RADIANS(bytes_in) | KEEP bytes_in, rad ; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index c5d3ee29d0bda..c33acf95aa33f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -140,6 +140,12 @@ public enum Cap { */ CASE_MV, + /** + * Support for loading values over enrich. This is supported by all versions of ESQL but not + * the unit test CsvTests. + */ + ENRICH_LOAD, + /** * Optimization for ST_CENTROID changed some results in cartesian data. #108713 */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java index 266f07d22eaf5..a347a6947bf67 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlFeatures.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.Build; -import org.elasticsearch.Version; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.features.FeatureSpecification; import org.elasticsearch.features.NodeFeature; @@ -16,7 +15,6 @@ import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import java.util.Collections; -import java.util.Map; import java.util.Set; /** @@ -48,34 +46,11 @@ public class EsqlFeatures implements FeatureSpecification { */ private static final NodeFeature ST_X_Y = new NodeFeature("esql.st_x_y"); - /** - * When we added the warnings for multivalued fields emitting {@code null} - * when they touched multivalued fields. Added in #102417. - */ - private static final NodeFeature MV_WARN = new NodeFeature("esql.mv_warn"); - - /** - * Support for loading {@code geo_point} and {@code cartesian_point} fields. Added in #102177. - */ - private static final NodeFeature SPATIAL_POINTS = new NodeFeature("esql.spatial_points"); - /** * Changed precision of {@code geo_point} and {@code cartesian_point} fields, by loading from source into WKB. Done in #103691. 
*/ private static final NodeFeature SPATIAL_POINTS_FROM_SOURCE = new NodeFeature("esql.spatial_points_from_source"); - /** - * When we added the warnings when conversion functions fail. Like {@code TO_INT('foo')}. - * Added in ESQL-1183. - */ - private static final NodeFeature CONVERT_WARN = new NodeFeature("esql.convert_warn"); - - /** - * When we flipped the return type of {@code POW} to always return a double. Changed - * in #102183. - */ - private static final NodeFeature POW_DOUBLE = new NodeFeature("esql.pow_double"); - /** * Support for loading {@code geo_shape} and {@code cartesian_shape} fields. Done in #104269. */ @@ -152,12 +127,6 @@ public class EsqlFeatures implements FeatureSpecification { */ public static final NodeFeature METADATA_FIELDS = new NodeFeature("esql.metadata_fields"); - /** - * Support for loading values over enrich. This is supported by all versions of ESQL but not - * the unit test CsvTests. - */ - public static final NodeFeature ENRICH_LOAD = new NodeFeature("esql.enrich_load"); - /** * Support for timespan units abbreviations */ @@ -215,16 +184,4 @@ public Set<NodeFeature> getFeatures() { return features; } } - - @Override - public Map<NodeFeature, Version> getHistoricalFeatures() { - return Map.ofEntries( - Map.entry(TransportEsqlStatsAction.ESQL_STATS_FEATURE, Version.V_8_11_0), - Map.entry(MV_WARN, Version.V_8_12_0), - Map.entry(SPATIAL_POINTS, Version.V_8_12_0), - Map.entry(CONVERT_WARN, Version.V_8_12_0), - Map.entry(POW_DOUBLE, Version.V_8_12_0), - Map.entry(ENRICH_LOAD, Version.V_8_12_0) - ); - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java index 985dcf118ac54..4067fc5a4e065 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlStatsAction.java @@ -14,7 +14,6 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.features.FeatureService; -import org.elasticsearch.features.NodeFeature; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -34,8 +33,6 @@ public class TransportEsqlStatsAction extends TransportNodesAction< EsqlStatsResponse.NodeStatsResponse, Void> { - static final NodeFeature ESQL_STATS_FEATURE = new NodeFeature("esql.stats_node"); - // the plan executor holds the metrics private final FeatureService featureService; private final PlanExecutor planExecutor; @@ -63,13 +60,7 @@ public TransportEsqlStatsAction( @Override protected DiscoveryNode[] resolveRequest(EsqlStatsRequest request, ClusterState clusterState) { - if (featureService.clusterHasFeature(clusterState, ESQL_STATS_FEATURE)) { - // use the whole cluster - return super.resolveRequest(request, clusterState); - } else { - // not all nodes in the cluster have upgraded to esql - just use this node for now - return new DiscoveryNode[] { clusterService.localNode() }; - } + return super.resolveRequest(request, clusterState); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index ff0c0d5a5d14c..012720db9efd9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -236,7 +236,10 @@ public final void test() throws Throwable { * are tested in integration tests. */ assumeFalse("metadata fields aren't supported", testCase.requiredCapabilities.contains(cap(EsqlFeatures.METADATA_FIELDS))); - assumeFalse("enrich can't load fields in csv tests", testCase.requiredCapabilities.contains(cap(EsqlFeatures.ENRICH_LOAD))); + assumeFalse( + "enrich can't load fields in csv tests", + testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.ENRICH_LOAD.capabilityName()) + ); assumeFalse( "can't use match in csv tests", testCase.requiredCapabilities.contains(EsqlCapabilities.Cap.MATCH_OPERATOR_COLON.capabilityName()) From 1a4b3d37b5271774b866bbcd5c8eba1907dcbeb2 Mon Sep 17 00:00:00 2001 From: Patrick Doyle <810052+prdoyle@users.noreply.github.com> Date: Thu, 21 Nov 2024 09:10:43 -0500 Subject: [PATCH 03/50] File-based settings health indicator (#117081) * Add FileSettingsService health indicator * spotless * YELLOW for any failure, plus most_recent_failure --- .../elasticsearch/node/NodeConstruction.java | 14 ++- .../service/FileSettingsService.java | 99 +++++++++++++++++-- .../ingest/ReservedPipelineActionTests.java | 7 +- ...leSettingsHealthIndicatorServiceTests.java | 90 +++++++++++++++++ .../service/FileSettingsServiceTests.java | 32 +++++- 5 files changed, 230 insertions(+), 12 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index e1fc586424dec..2488ac894a612 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -187,6 +187,7 @@ import org.elasticsearch.reservedstate.ReservedClusterStateHandlerProvider; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; import org.elasticsearch.reservedstate.service.FileSettingsService; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.rest.action.search.SearchResponseMetrics; import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptService; @@ -1032,10 +1033,12 @@ private void construct( actionModule.getReservedClusterStateService().installStateHandler(new ReservedRepositoryAction(repositoriesService)); actionModule.getReservedClusterStateService().installStateHandler(new ReservedPipelineAction()); + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService = new FileSettingsHealthIndicatorService(); FileSettingsService fileSettingsService = new FileSettingsService( clusterService, actionModule.getReservedClusterStateService(), - environment + environment, + fileSettingsHealthIndicatorService ); RestoreService restoreService = new RestoreService( @@ -1129,7 +1132,8 @@ private void construct( featureService, threadPool, telemetryProvider, - repositoriesService + repositoriesService, + fileSettingsHealthIndicatorService ) ); @@ -1301,7 +1305,8 @@ private Module loadDiagnosticServices( FeatureService featureService, ThreadPool threadPool, TelemetryProvider telemetryProvider, - RepositoriesService repositoriesService + RepositoriesService repositoriesService, + FileSettingsHealthIndicatorService fileSettingsHealthIndicatorService ) { MasterHistoryService masterHistoryService = 
new MasterHistoryService(transportService, threadPool, clusterService); @@ -1316,7 +1321,8 @@ private Module loadDiagnosticServices( new StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), new RepositoryIntegrityHealthIndicatorService(clusterService, featureService), new DiskHealthIndicatorService(clusterService, featureService), - new ShardsCapacityHealthIndicatorService(clusterService, featureService) + new ShardsCapacityHealthIndicatorService(clusterService, featureService), + fileSettingsHealthIndicatorService ); var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()); diff --git a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java index ae9ae6f8b5bf9..5f907572641a6 100644 --- a/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java +++ b/server/src/main/java/org/elasticsearch/reservedstate/service/FileSettingsService.java @@ -22,14 +22,27 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.file.MasterNodeFileWatchingService; import org.elasticsearch.env.Environment; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorImpact; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.health.node.HealthInfo; import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.BufferedInputStream; import java.io.IOException; import java.nio.file.Files; +import java.util.List; +import java.util.Map; import java.util.concurrent.ExecutionException; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.atomic.AtomicReference; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.health.ImpactArea.DEPLOYMENT_MANAGEMENT; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION; import static org.elasticsearch.reservedstate.service.ReservedStateVersionCheck.HIGHER_VERSION_ONLY; import static org.elasticsearch.xcontent.XContentType.JSON; @@ -53,6 +66,7 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement public static final String NAMESPACE = "file_settings"; public static final String OPERATOR_DIRECTORY = "operator"; private final ReservedClusterStateService stateService; + private final FileSettingsHealthIndicatorService healthIndicatorService; /** * Constructs the {@link FileSettingsService} @@ -60,10 +74,21 @@ public class FileSettingsService extends MasterNodeFileWatchingService implement * @param clusterService so we can register ourselves as a cluster state change listener * @param stateService an instance of the immutable cluster state controller, so we can perform the cluster state changes * @param environment we need the environment to pull the location of the config and operator directories + * @param healthIndicatorService tracks the success or failure of file-based settings */ - public FileSettingsService(ClusterService clusterService, ReservedClusterStateService stateService, Environment environment) { + public 
FileSettingsService( + ClusterService clusterService, + ReservedClusterStateService stateService, + Environment environment, + FileSettingsHealthIndicatorService healthIndicatorService + ) { super(clusterService, environment.configFile().toAbsolutePath().resolve(OPERATOR_DIRECTORY).resolve(SETTINGS_FILE_NAME)); this.stateService = stateService; + this.healthIndicatorService = healthIndicatorService; + } + + public FileSettingsHealthIndicatorService healthIndicatorService() { + return healthIndicatorService; } /** @@ -121,6 +146,7 @@ protected boolean shouldRefreshFileState(ClusterState clusterState) { @Override protected void processFileChanges() throws ExecutionException, InterruptedException, IOException { logger.info("processing path [{}] for [{}]", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_VERSION_ONLY); } @@ -131,6 +157,7 @@ protected void processFileChanges() throws ExecutionException, InterruptedExcept @Override protected void processFileOnServiceStart() throws IOException, ExecutionException, InterruptedException { logger.info("processing path [{}] for [{}] on service start", watchedFile(), NAMESPACE); + healthIndicatorService.changeOccurred(); processFileChanges(HIGHER_OR_SAME_VERSION); } @@ -146,6 +173,16 @@ private void processFileChanges(ReservedStateVersionCheck versionCheck) throws I completion.get(); } + private void completeProcessing(Exception e, PlainActionFuture<Void> completion) { + if (e != null) { + healthIndicatorService.failureOccurred(e.toString()); + completion.onFailure(e); + } else { + completion.onResponse(null); + healthIndicatorService.successOccurred(); + } + } + @Override protected void onProcessFileChangesException(Exception e) { if (e instanceof ExecutionException) { @@ -172,11 +209,61 @@ protected void processInitialFileMissing() throws ExecutionException, Interrupte completion.get(); } - private static void completeProcessing(Exception e, PlainActionFuture<Void> completion) { - if (e != null) { - completion.onFailure(e); - } else { - completion.onResponse(null); + public static class FileSettingsHealthIndicatorService implements HealthIndicatorService { + static final String NAME = "file_settings"; + static final String NO_CHANGES_SYMPTOM = "No file-based setting changes have occurred"; + static final String SUCCESS_SYMPTOM = "The most recent file-based settings were applied successfully"; + static final String FAILURE_SYMPTOM = "The most recent file-based settings encountered an error"; + + static final List<HealthIndicatorImpact> STALE_SETTINGS_IMPACT = List.of( + new HealthIndicatorImpact( + NAME, + "stale", + 3, + "The most recent file-based settings changes have not been applied.", + List.of(DEPLOYMENT_MANAGEMENT) + ) + ); + + private final AtomicLong changeCount = new AtomicLong(0); + private final AtomicLong failureStreak = new AtomicLong(0); + private final AtomicReference<String> mostRecentFailure = new AtomicReference<>(); + + public void changeOccurred() { + changeCount.incrementAndGet(); + } + + public void successOccurred() { + failureStreak.set(0); + } + + public void failureOccurred(String description) { + failureStreak.incrementAndGet(); + mostRecentFailure.set(description); + } + + @Override + public String name() { + return NAME; + } + + @Override + public HealthIndicatorResult calculate(boolean verbose, int maxAffectedResourcesCount, HealthInfo healthInfo) { + if (0 == changeCount.get()) { + return createIndicator(GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } + long numFailures =
failureStreak.get(); + if (0 == numFailures) { + return createIndicator(GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()); + } else { + return createIndicator( + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", numFailures, "most_recent_failure", mostRecentFailure.get())), + STALE_SETTINGS_IMPACT, + List.of() + ); + } } } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java index 0bc5c69d8ad4b..dc1698e3459ec 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/ReservedPipelineActionTests.java @@ -134,7 +134,12 @@ public void setup() { ); fileSettingsService = spy( - new FileSettingsService(clusterService, mock(ReservedClusterStateService.class), newEnvironment(Settings.EMPTY)) + new FileSettingsService( + clusterService, + mock(ReservedClusterStateService.class), + newEnvironment(Settings.EMPTY), + new FileSettingsService.FileSettingsHealthIndicatorService() + ) ); } diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java new file mode 100644 index 0000000000000..03d1adff42c4e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsHealthIndicatorServiceTests.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.reservedstate.service; + +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.FAILURE_SYMPTOM; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.NO_CHANGES_SYMPTOM; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.STALE_SETTINGS_IMPACT; +import static org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService.SUCCESS_SYMPTOM; + +/** + * Here, we test {@link FileSettingsHealthIndicatorService} in isolation; + * we do not test that {@link FileSettingsService} uses it correctly. 
+ */ +public class FileSettingsHealthIndicatorServiceTests extends ESTestCase { + + FileSettingsHealthIndicatorService healthIndicatorService; + + @Before + public void initialize() { + healthIndicatorService = new FileSettingsHealthIndicatorService(); + } + + public void testInitiallyGreen() { + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, NO_CHANGES_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + } + + public void testGreenYellowYellowGreen() { + healthIndicatorService.changeOccurred(); + // This is a strange case: a change occurred, but neither success nor failure have been reported yet. + // While the change is still in progress, we don't change the status. + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie 1"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 1L, "most_recent_failure", "whoopsie 1")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.failureOccurred("whoopsie #2"); + assertEquals( + new HealthIndicatorResult( + "file_settings", + YELLOW, + FAILURE_SYMPTOM, + new SimpleHealthIndicatorDetails(Map.of("failure_streak", 2L, "most_recent_failure", "whoopsie #2")), + STALE_SETTINGS_IMPACT, + List.of() + ), + healthIndicatorService.calculate(false, null) + ); + + healthIndicatorService.successOccurred(); + assertEquals( + new HealthIndicatorResult("file_settings", GREEN, SUCCESS_SYMPTOM, HealthIndicatorDetails.EMPTY, List.of(), List.of()), + healthIndicatorService.calculate(false, null) + ); + } +} diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index 0db29588c4298..ae60a21b6fc22 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NodeConnectionsService; +import org.elasticsearch.cluster.coordination.FailedToCommitClusterStateException; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.metadata.ReservedStateMetadata; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -33,6 +34,7 @@ import org.elasticsearch.env.BuildVersion; import org.elasticsearch.env.Environment; import org.elasticsearch.reservedstate.action.ReservedClusterSettingsAction; +import org.elasticsearch.reservedstate.service.FileSettingsService.FileSettingsHealthIndicatorService; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; @@ -78,6 +80,8 @@ import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoInteractions; +import static org.mockito.Mockito.verifyNoMoreInteractions; public class FileSettingsServiceTests extends ESTestCase { private static final Logger logger = 
LogManager.getLogger(FileSettingsServiceTests.class); @@ -86,6 +90,7 @@ public class FileSettingsServiceTests extends ESTestCase { private ReservedClusterStateService controller; private ThreadPool threadpool; private FileSettingsService fileSettingsService; + private FileSettingsHealthIndicatorService healthIndicatorService; @Before public void setUp() throws Exception { @@ -131,7 +136,8 @@ public void setUp() throws Exception { List.of(new ReservedClusterSettingsAction(clusterSettings)) ) ); - fileSettingsService = spy(new FileSettingsService(clusterService, controller, env)); + healthIndicatorService = mock(FileSettingsHealthIndicatorService.class); + fileSettingsService = spy(new FileSettingsService(clusterService, controller, env, healthIndicatorService)); } @After @@ -162,6 +168,7 @@ public void testStartStop() { assertTrue(fileSettingsService.watching()); fileSettingsService.stop(); assertFalse(fileSettingsService.watching()); + verifyNoInteractions(healthIndicatorService); } public void testOperatorDirName() { @@ -208,6 +215,10 @@ public void testInitialFileError() throws Exception { verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); // assert we never notified any listeners of successful application of file based settings assertFalse(settingsChanged.get()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalStateException.class.getName()))); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -232,6 +243,10 @@ public void testInitialFileWorks() throws Exception { verify(fileSettingsService, times(1)).processFileOnServiceStart(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -267,6 +282,10 @@ public void testProcessFileChanges() throws Exception { verify(fileSettingsService, times(1)).processFileChanges(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_VERSION_ONLY), any()); + + verify(healthIndicatorService, times(2)).changeOccurred(); + verify(healthIndicatorService, times(2)).successOccurred(); + verifyNoMoreInteractions(healthIndicatorService); } @SuppressWarnings("unchecked") @@ -321,6 +340,11 @@ public void testInvalidJSON() throws Exception { // Note: the name "processFileOnServiceStart" is a bit misleading because it is not // referring to fileSettingsService.start(). Rather, it is referring to the initialization // of the watcher thread itself, which occurs asynchronously when clusterChanged is first called. 
+ + verify(healthIndicatorService, times(2)).changeOccurred(); + verify(healthIndicatorService, times(1)).successOccurred(); + verify(healthIndicatorService, times(1)).failureOccurred(argThat(s -> s.startsWith(IllegalArgumentException.class.getName()))); + verifyNoMoreInteractions(healthIndicatorService); } private static void awaitOrBust(CyclicBarrier barrier) { @@ -373,6 +397,12 @@ public void testStopWorksInMiddleOfProcessing() throws Exception { fileSettingsService.close(); // let the deadlocked thread end, so we can cleanly exit the test deadThreadLatch.countDown(); + + verify(healthIndicatorService, times(1)).changeOccurred(); + verify(healthIndicatorService, times(1)).failureOccurred( + argThat(s -> s.startsWith(FailedToCommitClusterStateException.class.getName())) + ); + verifyNoMoreInteractions(healthIndicatorService); } public void testHandleSnapshotRestoreClearsMetadata() throws Exception { From 5500a5ec6838ea02ab45a14e116deafb3a5f71e6 Mon Sep 17 00:00:00 2001 From: Max Hniebergall <137079448+maxhniebergall@users.noreply.github.com> Date: Thu, 21 Nov 2024 09:38:22 -0500 Subject: [PATCH 04/50] [ML] Fix deberta tokenizer bug caused by bug in normalizer (#117189) * Fix deberta tokenizer bug caused by bug in normalizer which caused offsets to be negative * Update docs/changelog/117189.yaml --- docs/changelog/117189.yaml | 5 +++++ .../tokenizers/PrecompiledCharMapNormalizer.java | 2 +- .../nlp/tokenizers/DebertaV2TokenizerTests.java | 14 ++++++++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/117189.yaml diff --git a/docs/changelog/117189.yaml b/docs/changelog/117189.yaml new file mode 100644 index 0000000000000..e89c2d81506d9 --- /dev/null +++ b/docs/changelog/117189.yaml @@ -0,0 +1,5 @@ +pr: 117189 +summary: Fix deberta tokenizer bug caused by bug in normalizer +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java index bbe5bea691c35..5dd7dbbffaa61 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/PrecompiledCharMapNormalizer.java @@ -194,7 +194,7 @@ Reader normalize(CharSequence str) { if (charDelta < 0) { // normalised form is shorter int lastDiff = getLastCumulativeDiff(); - addOffCorrectMap(normalizedCharPos, lastDiff + charDelta); + addOffCorrectMap(normalizedCharPos, lastDiff - charDelta); } else if (charDelta > 0) { // inserted chars, add the offset in the output stream int lastDiff = getLastCumulativeDiff(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java index bbe509da67452..a8461de8630ae 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DebertaV2TokenizerTests.java @@ -94,6 +94,20 @@ public void testTokenize() throws IOException { } } + public void testTokenizeWithHiddenControlCharacters() throws IOException { + try ( + DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder(
+ TEST_CASE_VOCAB, + TEST_CASE_SCORES, + new DebertaV2Tokenization(false, false, null, Tokenization.Truncate.NONE, -1) + ).build() + ) { + TokenizationResult.Tokens tokenization = tokenizer.tokenize("\u009F\u008Fz", Tokenization.Truncate.NONE, -1, 0, null).get(0); + assertThat(tokenStrings(tokenization.tokens().get(0)), contains("▁", "z")); + + } + } + public void testSurrogatePair() throws IOException { try ( DebertaV2Tokenizer tokenizer = DebertaV2Tokenizer.builder( From fa3799f82e628fd086b74f7ac13ed099a036bfd3 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Thu, 21 Nov 2024 15:57:10 +0100 Subject: [PATCH 05/50] [Build] Make JdkDownload Plugin configuration cache compatible (#117120) Also Fix zulu 8 download that we still use for testing old es versions --- .../internal/JdkDownloadPluginFuncTest.groovy | 74 +++++++++++------- .../gradle/internal/fake_zulu_macos.tar.gz | Bin 0 -> 2731 bytes .../elasticsearch/gradle/internal/Jdk.java | 7 +- .../gradle/internal/JdkDownloadPlugin.java | 9 +-- 4 files changed, 53 insertions(+), 37 deletions(-) create mode 100644 build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy index 94df02b280ca6..a4635a7232754 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/JdkDownloadPluginFuncTest.groovy @@ -9,7 +9,6 @@ package org.elasticsearch.gradle.internal - import spock.lang.Unroll import com.github.tomakehurst.wiremock.WireMockServer @@ -24,8 +23,7 @@ import java.nio.file.Paths import java.util.regex.Matcher import java.util.regex.Pattern -import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_ADOPTIUM -import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.VENDOR_OPENJDK +import static org.elasticsearch.gradle.internal.JdkDownloadPlugin.* class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { @@ -33,13 +31,11 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { private static final String ADOPT_JDK_VERSION = "12.0.2+10" private static final String ADOPT_JDK_VERSION_11 = "11.0.10+9" private static final String ADOPT_JDK_VERSION_15 = "15.0.2+7" + private static final String AZUL_JDK_VERSION_8 = "8u302+b08" + private static final String AZUL_8_DISTRO_VERSION = "8.56.0.23" private static final String OPEN_JDK_VERSION = "12.0.1+99@123456789123456789123456789abcde" private static final Pattern JDK_HOME_LOGLINE = Pattern.compile("JDK HOME: (.*)") - def setup() { - configurationCacheCompatible = false - } - @Unroll def "jdk #jdkVendor for #platform#suffix are downloaded and extracted"() { given: @@ -56,14 +52,16 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { version = '$jdkVersion' platform = "$platform" architecture = '$arch' + distributionVersion = '$distributionVersion' } } - def theJdks = jdks +// def theJdks = jdks tasks.register("getJdk") { dependsOn jdks.myJdk + def jdk = jdks.myJdk doLast { - println "JDK HOME: " + theJdks.myJdk + println "JDK HOME: " + jdk } } """ @@ -78,22 +76,23 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { assertExtraction(result.output, expectedJavaBin); where: - platform | arch | jdkVendor | jdkVersion | 
expectedJavaBin | suffix - "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "bin/java" | "" - "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "bin/java" | "(old version)" - "darwin" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | "Contents/Home/bin/java" | "" - "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | "Contents/Home/bin/java" | "(old version)" - "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "Contents/Home/bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" | "" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | "bin/java" | "(jdk 11)" - "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | "bin/java" | "(jdk 15)" + platform | arch | jdkVendor | jdkVersion | distributionVersion | expectedJavaBin | suffix + "linux" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" + "linux" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" + "windows" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "bin/java" | "" + "windows" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "bin/java" | "(old version)" + "darwin" | "x64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "darwin" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" + "mac" | "x64" | VENDOR_OPENJDK | OPEN_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "mac" | "x64" | VENDOR_OPENJDK | OPENJDK_VERSION_OLD | null | "Contents/Home/bin/java" | "(old version)" + "darwin" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "Contents/Home/bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | null | "bin/java" | "" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_11 | null | "bin/java" | "(jdk 11)" + "linux" | "aarch64" | VENDOR_ADOPTIUM | ADOPT_JDK_VERSION_15 | null | "bin/java" | "(jdk 15)" + "darwin" | "aarch64" | VENDOR_ZULU | AZUL_JDK_VERSION_8 | AZUL_8_DISTRO_VERSION | "Contents/Home/bin/java" | "(jdk 8)" } def "transforms are reused across projects"() { @@ -118,9 +117,10 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { } } tasks.register("getJdk") { - dependsOn jdks.myJdk + def jdk = jdks.myJdk + dependsOn jdk doLast { - println "JDK HOME: " + jdks.myJdk + println "JDK HOME: " + jdk } } """ @@ -137,7 +137,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { result.output.count("Unpacking linux-12.0.2-x64.tar.gz using ${SymbolicLinkPreservingUntarTransform.simpleName}") == 1 where: - platform | jdkVendor | jdkVersion | expectedJavaBin + platform | jdkVendor | jdkVersion | expectedJavaBin "linux" | 
VENDOR_ADOPTIUM | ADOPT_JDK_VERSION | "bin/java" } @@ -159,6 +159,7 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { vendor = '$VENDOR_ADOPTIUM' version = '$ADOPT_JDK_VERSION' platform = "$platform" + distributionVersion = '$ADOPT_JDK_VERSION' architecture = "x64" } } @@ -204,6 +205,8 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { assert matcher.find() == true; String jdkHome = matcher.group(1); Path javaPath = Paths.get(jdkHome, javaBin); + println "canonical " + javaPath.toFile().getCanonicalPath() + Paths.get(jdkHome).toFile().listFiles().each { println it } assert Files.exists(javaPath) == true; true } @@ -221,15 +224,26 @@ class JdkDownloadPluginFuncTest extends AbstractGradleFuncTest { final String versionPath = isOld ? "jdk1/99" : "jdk12.0.1/123456789123456789123456789abcde/99"; final String filename = "openjdk-" + (isOld ? "1" : "12.0.1") + "_" + effectivePlatform + "-x64_bin." + extension(platform); return "/java/GA/" + versionPath + "/GPL/" + filename; + } else if (vendor.equals(VENDOR_ZULU)) { + // we only have a single version of zulu currently in the tests + return "/zulu/bin/zulu8.56.0.23-ca-jdk8.0.302-macosx_aarch64.tar.gz" } } private static byte[] filebytes(final String vendor, final String platform) throws IOException { final String effectivePlatform = getPlatform(vendor, platform); if (vendor.equals(VENDOR_ADOPTIUM)) { - return JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_adoptium_" + effectivePlatform + "." + extension(platform)).getBytes() + return JdkDownloadPluginFuncTest.class.getResourceAsStream( + "fake_adoptium_" + effectivePlatform + "." + extension(platform) + ).getBytes() } else if (vendor.equals(VENDOR_OPENJDK)) { - JdkDownloadPluginFuncTest.class.getResourceAsStream("fake_openjdk_" + effectivePlatform + "." + extension(platform)).getBytes() + return JdkDownloadPluginFuncTest.class.getResourceAsStream( + "fake_openjdk_" + effectivePlatform + "." + extension(platform) + ).getBytes() + } else { + // zulu + String resourcePath = "fake_zulu_" + effectivePlatform + "." + extension(platform) + return JdkDownloadPluginFuncTest.class.getResourceAsStream(resourcePath).getBytes() } } diff --git a/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz b/build-tools-internal/src/integTest/resources/org/elasticsearch/gradle/internal/fake_zulu_macos.tar.gz new file mode 100644 index 0000000000000000000000000000000000000000..87361b67ec76ca35cd2618994c9025bdc1140765 GIT binary patch literal 2731 zcmZvdX*|?x8^_0T7)Mz<6lJZ*5=rCUSG~_>&m~7#cQEEbVvL##7 zlw^!0M)qB08B7#oGUh+`Gd<^d-aYq=>(%|am+SideqY68Btq=F@fCysCh5w*MNleE4ED5jZLq&Bb1Y&AvrsFPy=o%G~fOER-wd#74PD_OZX>2K8$>%Oo zFAwOxle&%9@=-VvH6{f2{Uwh{jwmb~tUtRI@ap}0k?rWj){Re*39B)nB|s#v!reFG zN4Io)D@SayNIubF@t`ezTKU7p2Q}ui=T|Q@v@L{3`*qd&-Xi1so_H6um-YBN`_LI^ z1_QEfaK1C8xDq{e{vu|wucv&YYdDgdaRTVw;83Q)k_Ysu7Lf4YZmrWl%6b%s5pS(6 zWmTDS7YD3SL@Ql%hsoLbTGSb|ElbGEH6B<;vv)6wb?6qQYpzOT3Yz(m3Isx#ipk(a7-@a zrkJSX-vk!Is#>M1RZsc+!K^fj!U2O%(T6r|j-|+)_;_%*mP)v<{j!N6J8sphfHIl1 zlS?R0CpzD&_c4lo{iOR~dQ&2$MQD;ZW=i=vo1A}tbTVmme{y?7UUP=F@x3cqy^bdm zFc>2Zy_;j+^YrI3#jhs)TGzAEI`<|FdDCqVY<*%Ry4Td^b>jVve(Yym&CdQjN2s`Z zEUZ{=bZqipT~4nuKNP6lN#F9#@ub{diSR_L+y4a~=3DIoF(5?mX+bYrShj z!_BmF*2`JDVORrO`g}0^k87C%^?AeeOPMbe4Tk#hu1yUed=AN`Cw=4>6QemK1Q(D=+byFgNnM$hbri*m00dNat^SQPFFHACPTB=yV}>pc8MOAFx++oYH#nmK0zsxjE1o3M{nv=c`v8I9z5s1dSgf9XbVeCy0#`MurTGpi;MKG%b1<+%FDB=zV&-+ zpE{Ti*$XPSpBuilPYL0=(mQY_6tL3-NUs5YzR$eV%GuE6cAi!g+rXs}@)@K! 
z*Fz!&hSMo`RA28-_(^WM(z!M5qCVrht(XRudx0g6Nq!b}^gAS1rYcb1Gh$K&FQT8C zr3O>;e)z8tvlRX_Ra|M@PZR=?i{n*-hZ0vDfk*-Z2!yM|ixS|?GL%2o;B4l>4xtS| zBE~|ny)KR$o!68Pi{CMmZs+r0ufBk-UEe+fBJq-NLr2|i>&(_3#irPa4;@ zJo1mY?S6g>!6%7yx$7mkJ2%4 zc?pTvk;~z*_&bbsl$9U>-1cO?bF1vk4XAuIRZ>+cAycL6P8w0CGk=VA279SiR2Vms zU$ftTwaZ`A&-QMXeN1JvAEgV{Nu^nO7mc1)p_*q>{rb1~EtZok9mh4h&wbE+gi5!x zv$xm&Qsf`(|9t&)Ic>nb{pMY?Own&6ndjZ}dNwfRfqm?tvpRCD?2syjzy+4H{C7K& z(_rQp9v!3~xM2|U{J~$}29uf|&tv4Z$F{ahu{mh8TdFHG-dX(dr8mVvyzCLl(;0QV zZ0RaJH#SrQ7VXgufny?zN%2<(90-ci565Vx^##P^&Y6d4AD0uGhiZrW_KB5}YKnSt zWENb1V_0TB-G}91mSd_Sr&j8zS`ixL%AwO~stPjU`|HxD~fB?%HIYA1=@g? zeqA-xpri!8qiWUPuR}^H>EQgLnt}zVmvc00Yx)4P1LCBszgq=yvufN0lW<(PlOq1x z9z2L!RQyNeW*nYh?S9p69f@DIZQ~^AS#+nEs%sIBoeI?S6YXaYYcTv8sbfV)FlzDp z#B0an+;(0+9ADx#f~sfZS8<_rPzuj?Or&KLu#o6pV%zuQwrNlpVWS;d`){sPtU#GU z6PG|x9i*^sEc`1nUjyrhZc@&^6`^_^V { private static final Pattern LEGACY_VERSION_PATTERN = Pattern.compile("(\\d)(u\\d+)\\+(b\\d+?)(@([a-f0-9]{32}))?"); private final String name; - private final Configuration configuration; + private final FileCollection configuration; private final Property vendor; private final Property version; private final Property platform; private final Property architecture; private final Property distributionVersion; + private final String configurationName; private String baseVersion; private String major; private String build; @@ -47,6 +49,7 @@ public class Jdk implements Buildable, Iterable { Jdk(String name, Configuration configuration, ObjectFactory objectFactory) { this.name = name; + this.configurationName = configuration.getName(); this.configuration = configuration; this.vendor = objectFactory.property(String.class); this.version = objectFactory.property(String.class); @@ -137,7 +140,7 @@ public String getPath() { } public String getConfigurationName() { - return configuration.getName(); + return configurationName; } @Override diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index 5b195cad3388f..3c278128e43f2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -21,8 +21,6 @@ import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import java.util.Arrays; - /** * @deprecated We wanna get rid from this and custom jdk downloads via this plugin and * make leverage the gradle toolchain resolver capabilities. 
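
A minimal, self-contained sketch of the configuration-cache pattern the Jdk.java hunk above applies (the class and field names here are illustrative, not part of the change): task state may only hold values Gradle can serialize, so the live Configuration is narrowed to a FileCollection and its name is copied out eagerly instead of being read from the Configuration at execution time.

```java
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;

class JdkHandle {
    private final FileCollection files;      // Configuration narrowed to a cache-friendly view
    private final String configurationName;  // plain value, captured at configuration time

    JdkHandle(Configuration configuration) {
        this.files = configuration;                       // Configuration is-a FileCollection
        this.configurationName = configuration.getName(); // copy the value now, not at run time
    }

    String getConfigurationName() {
        return configurationName; // safe at execution time, no live Configuration kept
    }
}
```
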
@@ -38,8 +36,8 @@ public class JdkDownloadPlugin implements Plugin { private static final String REPO_NAME_PREFIX = "jdk_repo_"; private static final String EXTENSION_NAME = "jdks"; - public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.+).jdk"; - public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*linux_aarch64"; + public static final String JDK_TRIMMED_PREFIX = "(jdk-?\\d.*)|(zulu-?\\d.*).jdk"; + public static final String ZULU_LINUX_AARCH_PATTERN = "zulu.*_aarch64"; @Override public void apply(Project project) { @@ -66,7 +64,8 @@ public void apply(Project project) { .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> { parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX); - parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN)); + // parameters.setAsFiletreeOutput(true); + // parameters.setKeepStructureFor(Arrays.asList(ZULU_LINUX_AARCH_PATTERN)); }); }); From b42e8f27499b4171c714670dd5c4ab1aa2dd9476 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Thu, 21 Nov 2024 10:11:20 -0500 Subject: [PATCH 06/50] Updating PivotConfig max_page_search_size deprecation warning to critical (#117051) Co-authored-by: Elastic Machine --- .../xpack/core/transform/transforms/pivot/PivotConfig.java | 2 +- .../core/transform/transforms/TransformConfigTests.java | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java index 47f7fea8dc199..6e78c2e8d3ef3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/PivotConfig.java @@ -184,7 +184,7 @@ public void checkForDeprecations(String id, NamedXContentRegistry namedXContentR onDeprecation.accept( // max_page_search_size got deprecated in 7.8, still accepted for 8.x, to be removed in 9.x new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java index f1c2de11496bf..8cfecc432c661 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -930,7 +930,7 @@ public void testCheckForDeprecations() { equalTo( Collections.singletonList( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -952,7 +952,7 @@ public void testCheckForDeprecations() { equalTo( List.of( new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, 
TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, @@ -982,7 +982,7 @@ public void testCheckForDeprecations() { null ), new DeprecationIssue( - Level.WARNING, + Level.CRITICAL, "Transform [" + id + "] uses the deprecated setting [max_page_search_size]", TransformDeprecations.MAX_PAGE_SEARCH_SIZE_BREAKING_CHANGES_URL, TransformDeprecations.ACTION_MAX_PAGE_SEARCH_SIZE_IS_DEPRECATED, From e7a2a203ffa04e57e21319123a0ecf50340d3710 Mon Sep 17 00:00:00 2001 From: Adam Demjen Date: Thu, 21 Nov 2024 11:00:59 -0500 Subject: [PATCH 07/50] Add version prefix to Inference Service API path (#117095) * Add version prefix to EIS API path * Update docs/changelog/117095.yaml --- docs/changelog/117095.yaml | 5 +++++ .../ElasticInferenceServiceSparseEmbeddingsModel.java | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/117095.yaml diff --git a/docs/changelog/117095.yaml b/docs/changelog/117095.yaml new file mode 100644 index 0000000000000..27460924ecb71 --- /dev/null +++ b/docs/changelog/117095.yaml @@ -0,0 +1,5 @@ +pr: 117095 +summary: Add version prefix to Inference Service API path +area: Inference +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index bbbae736dbeb9..731153b3d5dbc 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java @@ -108,6 +108,6 @@ private URI createUri() throws URISyntaxException { default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]"); } - return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/sparse-text-embedding/" + modelIdUriPath); + return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath); } } From 041022944e7df963d4a3269dfa6ec7791247251e Mon Sep 17 00:00:00 2001 From: Jason Botzas-Coluni <44372106+jaybcee@users.noreply.github.com> Date: Thu, 21 Nov 2024 12:08:14 -0500 Subject: [PATCH 08/50] Remove all mentions of eis and gateway and deprecate flags that do (#116692) --- docs/changelog/116692.yaml | 5 +++ .../xpack/inference/InferenceCrudIT.java | 12 ++++-- .../xpack/inference/InferencePlugin.java | 41 ++++++++++++++++--- .../ElasticInferenceServiceActionCreator.java | 7 +++- ...ServiceSparseEmbeddingsRequestManager.java | 4 +- ...ServiceSparseEmbeddingsResponseEntity.java | 2 +- .../elastic/ElasticInferenceService.java | 5 ++- .../ElasticInferenceServiceComponents.java | 2 +- .../ElasticInferenceServiceFeature.java | 4 +- .../ElasticInferenceServiceSettings.java | 19 ++++++++- ...InferenceServiceSparseEmbeddingsModel.java | 11 ++++- 11 files changed, 92 insertions(+), 20 deletions(-) create mode 100644 docs/changelog/116692.yaml diff --git a/docs/changelog/116692.yaml b/docs/changelog/116692.yaml new file mode 100644 index 0000000000000..30f9e62095436 --- /dev/null +++ b/docs/changelog/116692.yaml @@ -0,0 +1,5 @@ +pr: 116692 +summary: Remove all mentions of eis and gateway and deprecate flags that do +area: Machine Learning +type: enhancement +issues: [] diff --git 
a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index 78e064b42bbb2..f5773e73f2b22 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -134,7 +134,8 @@ public void testApisWithoutTaskType() throws IOException { @SuppressWarnings("unchecked") public void testGetServicesWithoutTaskType() throws IOException { List services = getAllServices(); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(18)); } else { assertThat(services.size(), equalTo(17)); @@ -169,7 +170,8 @@ public void testGetServicesWithoutTaskType() throws IOException { "watsonxai" ) ); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(6, "elastic"); } assertArrayEquals(providers, providerList.toArray()); @@ -257,7 +259,8 @@ public void testGetServicesWithCompletionTaskType() throws IOException { public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { List services = getServices(TaskType.SPARSE_EMBEDDING); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { assertThat(services.size(), equalTo(5)); } else { assertThat(services.size(), equalTo(4)); @@ -272,7 +275,8 @@ public void testGetServicesWithSparseEmbeddingTaskType() throws IOException { Arrays.sort(providers); var providerList = new ArrayList<>(Arrays.asList("alibabacloud-ai-search", "elasticsearch", "hugging_face", "test_service")); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() + || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { providerList.add(1, "elastic"); } assertArrayEquals(providers, providerList.toArray()); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 62405a2e9f7de..48458bf4f5086 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.inference; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionResponse; @@ -91,7 +93,6 @@ import org.elasticsearch.xpack.inference.services.cohere.CohereService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceComponents; -import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature; import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSettings; import org.elasticsearch.xpack.inference.services.elasticsearch.ElasticsearchInternalService; import org.elasticsearch.xpack.inference.services.googleaistudio.GoogleAiStudioService; @@ -113,6 +114,9 @@ import java.util.stream.Stream; import static java.util.Collections.singletonList; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, MapperPlugin, SearchPlugin { @@ -135,11 +139,13 @@ public class InferencePlugin extends Plugin implements ActionPlugin, ExtensibleP public static final String NAME = "inference"; public static final String UTILITY_THREAD_POOL_NAME = "inference_utility"; + private static final Logger log = LogManager.getLogger(InferencePlugin.class); + private final Settings settings; private final SetOnce httpFactory = new SetOnce<>(); private final SetOnce amazonBedrockFactory = new SetOnce<>(); private final SetOnce serviceComponents = new SetOnce<>(); - private final SetOnce eisComponents = new SetOnce<>(); + private final SetOnce elasticInferenceServiceComponents = new SetOnce<>(); private final SetOnce inferenceServiceRegistry = new SetOnce<>(); private final SetOnce shardBulkInferenceActionFilter = new SetOnce<>(); private List inferenceServiceExtensions; @@ -207,12 +213,35 @@ public Collection createComponents(PluginServices services) { var inferenceServices = new ArrayList<>(inferenceServiceExtensions); inferenceServices.add(this::getInferenceServiceFactories); - if (ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { - ElasticInferenceServiceSettings eisSettings = new ElasticInferenceServiceSettings(settings); - eisComponents.set(new ElasticInferenceServiceComponents(eisSettings.getEisGatewayUrl())); + // Set elasticInferenceUrl based on feature flags to support transitioning to the new Elastic Inference Service URL without exposing + // internal names like "eis" or "gateway". + ElasticInferenceServiceSettings inferenceServiceSettings = new ElasticInferenceServiceSettings(settings); + + String elasticInferenceUrl = null; + + if (ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + elasticInferenceUrl = inferenceServiceSettings.getElasticInferenceServiceUrl(); + } else if (DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled()) { + log.warn( + "Deprecated flag {} detected for enabling {}. 
Please use {}.", + ELASTIC_INFERENCE_SERVICE_IDENTIFIER, + DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG, + ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG + ); + elasticInferenceUrl = inferenceServiceSettings.getEisGatewayUrl(); + } + + if (elasticInferenceUrl != null) { + elasticInferenceServiceComponents.set(new ElasticInferenceServiceComponents(elasticInferenceUrl)); inferenceServices.add( - () -> List.of(context -> new ElasticInferenceService(httpFactory.get(), serviceComponents.get(), eisComponents.get())) + () -> List.of( + context -> new ElasticInferenceService( + httpFactory.get(), + serviceComponents.get(), + elasticInferenceServiceComponents.get() + ) + ) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java index c8ada6e535b63..fa096901ed67a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/elastic/ElasticInferenceServiceActionCreator.java @@ -15,9 +15,11 @@ import org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceSparseEmbeddingsModel; import org.elasticsearch.xpack.inference.telemetry.TraceContext; +import java.util.Locale; import java.util.Objects; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.constructFailedToSendRequestMessage; +import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceActionCreator implements ElasticInferenceServiceActionVisitor { @@ -36,7 +38,10 @@ public ElasticInferenceServiceActionCreator(Sender sender, ServiceComponents ser @Override public ExecutableAction create(ElasticInferenceServiceSparseEmbeddingsModel model) { var requestManager = new ElasticInferenceServiceSparseEmbeddingsRequestManager(model, serviceComponents, traceContext); - var errorMessage = constructFailedToSendRequestMessage(model.uri(), "Elastic Inference Service sparse embeddings"); + var errorMessage = constructFailedToSendRequestMessage( + model.uri(), + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER) + ); return new SenderExecutableAction(sender, requestManager, errorMessage); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java index e7ee41525f07d..bf3409888aaf8 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/ElasticInferenceServiceSparseEmbeddingsRequestManager.java @@ -22,9 +22,11 @@ import org.elasticsearch.xpack.inference.telemetry.TraceContext; import java.util.List; +import java.util.Locale; import java.util.function.Supplier; import static org.elasticsearch.xpack.inference.common.Truncator.truncate; +import static 
org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER; public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends ElasticInferenceServiceRequestManager { @@ -40,7 +42,7 @@ public class ElasticInferenceServiceSparseEmbeddingsRequestManager extends Elast private static ResponseHandler createSparseEmbeddingsHandler() { return new ElasticInferenceServiceResponseHandler( - "Elastic Inference Service sparse embeddings", + String.format(Locale.ROOT, "%s sparse embeddings", ELASTIC_INFERENCE_SERVICE_IDENTIFIER), ElasticInferenceServiceSparseEmbeddingsResponseEntity::fromResponse ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java index 2b36cc5d22cd4..42ca45f75a9c0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/elastic/ElasticInferenceServiceSparseEmbeddingsResponseEntity.java @@ -33,7 +33,7 @@ public class ElasticInferenceServiceSparseEmbeddingsResponseEntity { "Failed to find required field [%s] in Elastic Inference Service embeddings response"; /** - * Parses the EIS json response. + * Parses the Elastic Inference Service json response. * * For a request like: * diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index 98429ed3d001d..e7ce5903163d4 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -62,6 +62,7 @@ public class ElasticInferenceService extends SenderService { public static final String NAME = "elastic"; + public static final String ELASTIC_INFERENCE_SERVICE_IDENTIFIER = "Elastic Inference Service"; private final ElasticInferenceServiceComponents elasticInferenceServiceComponents; @@ -70,10 +71,10 @@ public class ElasticInferenceService extends SenderService { public ElasticInferenceService( HttpRequestSender.Factory factory, ServiceComponents serviceComponents, - ElasticInferenceServiceComponents eisComponents + ElasticInferenceServiceComponents elasticInferenceServiceComponents ) { super(factory, serviceComponents); - this.elasticInferenceServiceComponents = eisComponents; + this.elasticInferenceServiceComponents = elasticInferenceServiceComponents; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java index 4386964e927d2..c5b2cb693df13 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceComponents.java @@ -7,4 +7,4 @@ package org.elasticsearch.xpack.inference.services.elastic; -public record ElasticInferenceServiceComponents(String eisGatewayUrl) {} +public record ElasticInferenceServiceComponents(String elasticInferenceServiceUrl) {} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java index b0fb6d14ee6f7..324c20d0e48bf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceFeature.java @@ -15,6 +15,8 @@ */ public class ElasticInferenceServiceFeature { - public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + @Deprecated + public static final FeatureFlag DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("eis"); + public static final FeatureFlag ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG = new FeatureFlag("elastic_inference_service"); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java index 8525710c6cf23..bc2daddc2a346 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSettings.java @@ -14,20 +14,37 @@ public class ElasticInferenceServiceSettings { + @Deprecated static final Setting EIS_GATEWAY_URL = Setting.simpleString("xpack.inference.eis.gateway.url", Setting.Property.NodeScope); + static final Setting ELASTIC_INFERENCE_SERVICE_URL = Setting.simpleString( + "xpack.inference.elastic.url", + Setting.Property.NodeScope + ); + // Adjust this variable to be volatile, if the setting can be updated at some point in time + @Deprecated private final String eisGatewayUrl; + private final String elasticInferenceServiceUrl; + public ElasticInferenceServiceSettings(Settings settings) { eisGatewayUrl = EIS_GATEWAY_URL.get(settings); + elasticInferenceServiceUrl = ELASTIC_INFERENCE_SERVICE_URL.get(settings); + } public static List> getSettingsDefinitions() { - return List.of(EIS_GATEWAY_URL); + return List.of(EIS_GATEWAY_URL, ELASTIC_INFERENCE_SERVICE_URL); } + @Deprecated public String getEisGatewayUrl() { return eisGatewayUrl; } + + public String getElasticInferenceServiceUrl() { + return elasticInferenceServiceUrl; + } + } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java index 731153b3d5dbc..cc69df86933de 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceSparseEmbeddingsModel.java
@@ -22,8 +22,11 @@
 
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.util.Locale;
 import java.util.Map;
 
+import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceService.ELASTIC_INFERENCE_SERVICE_IDENTIFIER;
+
 public class ElasticInferenceServiceSparseEmbeddingsModel extends ElasticInferenceServiceModel {
 
     private final URI uri;
@@ -105,9 +108,13 @@ private URI createUri() throws URISyntaxException {
 
         switch (modelId) {
             case ElserModels.ELSER_V2_MODEL -> modelIdUriPath = "ELSERv2";
-            default -> throw new IllegalArgumentException("Unsupported model for EIS [" + modelId + "]");
+            default -> throw new IllegalArgumentException(
+                String.format(Locale.ROOT, "Unsupported model for %s [%s]", ELASTIC_INFERENCE_SERVICE_IDENTIFIER, modelId)
+            );
         }
 
-        return new URI(elasticInferenceServiceComponents().eisGatewayUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath);
+        return new URI(
+            elasticInferenceServiceComponents().elasticInferenceServiceUrl() + "/api/v1/sparse-text-embedding/" + modelIdUriPath
+        );
     }
 }

From 35116c3b78208ba9f17fc7551b6e8882b564fd4b Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Thu, 21 Nov 2024 13:24:22 -0500
Subject: [PATCH 09/50] ESQL: Fix a bug in LuceneQueryExpressionEvaluator (#117252)

* ESQL: Fix a bug in LuceneQueryExpressionEvaluator

This fixes a Lucene usage bug in `LuceneQueryExpressionEvaluator`, the
evaluator we plan to use to run things like `MATCH` when we *can't* push
it to a source operator. That'll be useful for things like:

```
FROM foo
| STATS COUNT(), COUNT() WHERE MATCH(message, "error")
```

Explanation:

When using Lucene's `Scorer` and `BulkScorer` you must stay on the same
thread. It's a rule. Most of the time nothing bad happens if you shift
threads, but sometimes things explode and Lucene doesn't work. Driver
can shift from one thread to another - that's just how it's designed.
It's a "yield after running a while" kind of thing. In tests we
sometimes get a version of the `Scorer` and `BulkScorer` that asserts
that you don't shift threads. That is what caused this test failure.

Anyway! This builds protection into `LuceneQueryExpressionEvaluator` so
that if it *does* shift threads then it'll rebuild the `Scorer` and
`BulkScorer`. That makes the test happy and makes even the grumpiest
Lucene object happy.
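
The guard, as a simplified sketch of the change below (`weight` and
`ctx` are the per-segment fields of `SegmentState`):

```java
private final Weight weight;       // compiled query for this segment
private final LeafReaderContext ctx;
private Scorer scorer;             // per-segment scorer, built lazily
private Thread scorerThread;       // thread that built it

private void initScorer(int minDocId) throws IOException {
    if (scorer == null                             // not initialized yet
        || scorerThread != Thread.currentThread()  // Driver moved to another thread
        || scorer.iterator().docID() > minDocId) { // previous block came "after" this one
        scorerThread = Thread.currentThread();
        scorer = weight.scorer(ctx);               // null means no matches in this segment
    }
}
```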
Closes #116879 --- muted-tests.yml | 3 --- .../LuceneQueryExpressionEvaluator.java | 23 ++++++++++++++++--- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index f8ab532dcaa94..d1e1976262f55 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -223,9 +223,6 @@ tests: - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 -- class: org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluatorTests - method: testTermQuery - issue: https://github.com/elastic/elasticsearch/issues/116879 - class: org.elasticsearch.xpack.restart.QueryBuilderBWCIT method: testQueryBuilderBWC {p0=UPGRADED} issue: https://github.com/elastic/elasticsearch/issues/116989 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java index dcd13671670d8..d7d9da052a962 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluator.java @@ -191,18 +191,29 @@ SegmentState segmentState(int segment) throws IOException { private class SegmentState { private final Weight weight; private final LeafReaderContext ctx; + /** * Lazily initialed {@link Scorer} for this. {@code null} here means uninitialized * or that {@link #noMatch} is true. */ private Scorer scorer; + /** + * Thread that initialized the {@link #scorer}. + */ + private Thread scorerThread; + /** * Lazily initialed {@link BulkScorer} for this. {@code null} here means uninitialized * or that {@link #noMatch} is true. */ private BulkScorer bulkScorer; + /** + * Thread that initialized the {@link #bulkScorer}. + */ + private Thread bulkScorerThread; + /** * Set to {@code true} if, in the process of building a {@link Scorer} or {@link BulkScorer}, * the {@link Weight} tells us there aren't any matches. @@ -223,7 +234,10 @@ BooleanVector scoreDense(int min, int max) throws IOException { if (noMatch) { return blockFactory.newConstantBooleanVector(false, length); } - if (bulkScorer == null) { + if (bulkScorer == null || // The bulkScorer wasn't initialized + Thread.currentThread() != bulkScorerThread // The bulkScorer was initialized on a different thread + ) { + bulkScorerThread = Thread.currentThread(); bulkScorer = weight.bulkScorer(ctx); if (bulkScorer == null) { noMatch = true; @@ -257,8 +271,11 @@ private void initScorer(int minDocId) throws IOException { if (noMatch) { return; } - if (scorer == null || scorer.iterator().docID() > minDocId) { - // The previous block might have been beyond this one, reset the scorer and try again. 
+ if (scorer == null || // Scorer not initialized + scorerThread != Thread.currentThread() || // Scorer initialized on a different thread + scorer.iterator().docID() > minDocId // The previous block came "after" this one + ) { + scorerThread = Thread.currentThread(); scorer = weight.scorer(ctx); if (scorer == null) { noMatch = true; From 97bafb96aa3606c7d3b47c60ec42d571a20f7968 Mon Sep 17 00:00:00 2001 From: Brendan Cully Date: Thu, 21 Nov 2024 10:41:15 -0800 Subject: [PATCH 10/50] Wait for cluster to be green in 40_get_secrets (#117217) Now that fast refresh searches go to search nodes instead of index nodes, the YAML test for `fleet.get_secret` sometimes fails if the shard isn't available yet on the search node before the request arrives. Addresses elasticsearch-serverless #3159, which can unmute the test once this is available. --- .../resources/rest-api-spec/test/fleet/40_secrets_get.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml index e74283bc873e3..ab150e41f310a 100644 --- a/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml +++ b/x-pack/plugin/fleet/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/fleet/40_secrets_get.yml @@ -3,6 +3,12 @@ fleet.post_secret: body: '{"value": "test secret"}' - set: { id: id } + # search node needs to be available for fleet.get_secret to work in stateless. + # The `.fleet-secrets` index is created on demand, and its search replica starts out unassigned, + # so wait_for_no_uninitialized_shards can miss it. + - do: + cluster.health: + wait_for_active_shards: all - do: fleet.get_secret: id: $id From 7aa07f1a2a34c2bdf73863f51485616d598bc7e7 Mon Sep 17 00:00:00 2001 From: Mikhail Berezovskiy Date: Thu, 21 Nov 2024 14:04:42 -0500 Subject: [PATCH 11/50] Explicit HTTP content copy/retain (#116115) --- .../forbidden/es-server-signatures.txt | 2 + docs/changelog/116115.yaml | 5 + .../Netty4IncrementalRequestHandlingIT.java | 5 - .../netty4/Netty4TrashingAllocatorIT.java | 122 ++ .../http/netty4/Netty4HttpRequest.java | 34 - .../transport/netty4/Netty4Utils.java | 2 +- .../transport/netty4/NettyAllocator.java | 113 +- .../transport/netty4/WrappedByteBuf.java | 1036 +++++++++++++++++ .../transport/netty4/NettyAllocatorTests.java | 106 ++ .../common/bytes/BytesReference.java | 23 + .../java/org/elasticsearch/http/HttpBody.java | 9 +- .../org/elasticsearch/http/HttpRequest.java | 6 - .../org/elasticsearch/http/HttpTracer.java | 2 +- .../elasticsearch/rest/BaseRestHandler.java | 9 +- .../elasticsearch/rest/FilterRestHandler.java | 5 - .../elasticsearch/rest/RestController.java | 4 - .../org/elasticsearch/rest/RestHandler.java | 12 - .../org/elasticsearch/rest/RestRequest.java | 61 +- .../elasticsearch/rest/RestRequestFilter.java | 7 +- .../rest/action/document/RestBulkAction.java | 16 +- .../rest/action/document/RestIndexAction.java | 19 +- .../rest/action/search/RestSearchAction.java | 4 - .../common/bytes/BytesArrayTests.java | 6 + .../elasticsearch/http/TestHttpRequest.java | 5 - .../rest/RestControllerTests.java | 5 - .../test/rest/FakeRestRequest.java | 5 - .../EnterpriseSearchBaseRestHandler.java | 2 +- .../logstash/rest/RestPutPipelineAction.java | 2 +- .../xpack/security/audit/AuditUtil.java | 5 +- .../rest/action/SecurityBaseRestHandler.java | 2 +- .../audit/logfile/LoggingAuditTrailTests.java | 8 
+- 31 files changed, 1505 insertions(+), 137 deletions(-) create mode 100644 docs/changelog/116115.yaml create mode 100644 modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java create mode 100644 modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java create mode 100644 modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index a9da7995c2b36..68b97050ea012 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -167,3 +167,5 @@ org.elasticsearch.cluster.SnapshotDeletionsInProgress$Entry#(java.lang.Str @defaultMessage Use a Thread constructor with a name, anonymous threads are more difficult to debug java.lang.Thread#(java.lang.Runnable) java.lang.Thread#(java.lang.ThreadGroup, java.lang.Runnable) + +org.elasticsearch.common.bytes.BytesReference#copyBytes(org.elasticsearch.common.bytes.BytesReference) @ This method is a subject for removal. Copying bytes is prone to performance regressions and unnecessary allocations. diff --git a/docs/changelog/116115.yaml b/docs/changelog/116115.yaml new file mode 100644 index 0000000000000..33e1735c20ca4 --- /dev/null +++ b/docs/changelog/116115.yaml @@ -0,0 +1,5 @@ +pr: 116115 +summary: Allow http unsafe buffers by default +area: Network +type: enhancement +issues: [] diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index 3095139ca4685..4bb27af4bd0f5 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -699,11 +699,6 @@ public Collection getRestHandlers( Predicate clusterSupportsFeature ) { return List.of(new BaseRestHandler() { - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public String getName() { return ROUTE; diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java new file mode 100644 index 0000000000000..18c91068ff4f9 --- /dev/null +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4TrashingAllocatorIT.java @@ -0,0 +1,122 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.http.netty4; + +import io.netty.handler.codec.http.HttpResponseStatus; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.core.Tuple; +import org.elasticsearch.features.NodeFeature; +import org.elasticsearch.http.HttpServerTransport; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.Collection; +import java.util.List; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class Netty4TrashingAllocatorIT extends ESNetty4IntegTestCase { + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.concatLists(List.of(Handler.class), super.nodePlugins()); + } + + @Override + protected boolean addMockHttpTransport() { + return false; + } + + public void testTrashContent() throws InterruptedException { + try (var client = new Netty4HttpClient()) { + var addr = randomFrom(internalCluster().getInstance(HttpServerTransport.class).boundAddress().boundAddresses()).address(); + var content = randomAlphaOfLength(between(1024, 2048)); + var responses = client.post(addr, List.of(new Tuple<>(Handler.ROUTE, content))); + assertEquals(HttpResponseStatus.OK, responses.stream().findFirst().get().status()); + } + } + + public static class Handler extends Plugin implements ActionPlugin { + static final String ROUTE = "/_test/trashing-alloc"; + + @Override + public Collection getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster, + Predicate clusterSupportsFeature + ) { + return List.of(new BaseRestHandler() { + @Override + public String getName() { + return ROUTE; + } + + @Override + public List routes() { + return List.of(new Route(RestRequest.Method.POST, ROUTE)); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + var content = request.releasableContent(); + var iter = content.iterator(); + return (chan) -> { + request.getHttpRequest().release(); + assertFalse(content.hasReferences()); + BytesRef br; + while ((br = iter.next()) != null) { + for (int i = br.offset; i < br.offset + br.length; i++) { + if (br.bytes[i] != 0) { + fail( + new AssertionError( + "buffer is not trashed, off=" + + br.offset + + " len=" + + br.length + + " pos=" + + i + + " ind=" + + (i - br.offset) + ) + ); + } + } + } + chan.sendResponse(new RestResponse(RestStatus.OK, 
"")); + }; + } + }); + } + } +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java index a1aa211814520..2662ddf7e1440 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequest.java @@ -9,7 +9,6 @@ package org.elasticsearch.http.netty4; -import io.netty.buffer.ByteBuf; import io.netty.buffer.Unpooled; import io.netty.handler.codec.http.DefaultFullHttpRequest; import io.netty.handler.codec.http.EmptyHttpHeaders; @@ -128,39 +127,6 @@ public void release() { } } - @Override - public HttpRequest releaseAndCopy() { - assert released.get() == false; - if (pooled == false) { - return this; - } - try { - final ByteBuf copiedContent = Unpooled.copiedBuffer(request.content()); - HttpBody newContent; - if (content.isStream()) { - newContent = content; - } else { - newContent = Netty4Utils.fullHttpBodyFrom(copiedContent); - } - return new Netty4HttpRequest( - sequence, - new DefaultFullHttpRequest( - request.protocolVersion(), - request.method(), - request.uri(), - copiedContent, - request.headers(), - request.trailingHeaders() - ), - new AtomicBoolean(false), - false, - newContent - ); - } finally { - release(); - } - } - @Override public final Map> getHeaders() { return headers; diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java index 459b6c77be8c3..81b4fd3fbb9ee 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4Utils.java @@ -179,7 +179,7 @@ public boolean hasReferences() { } public static HttpBody.Full fullHttpBodyFrom(final ByteBuf buf) { - return new HttpBody.ByteRefHttpBody(toBytesReference(buf)); + return new HttpBody.ByteRefHttpBody(toReleasableBytesReference(buf)); } public static Recycler createRecycler(Settings settings) { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java index ab38b5f0c4c8c..1eb7e13889338 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/NettyAllocator.java @@ -24,9 +24,11 @@ import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.Assertions; import org.elasticsearch.core.Booleans; import org.elasticsearch.monitor.jvm.JvmInfo; +import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; public class NettyAllocator { @@ -44,8 +46,9 @@ public class NettyAllocator { private static final String USE_NETTY_DEFAULT_CHUNK = "es.unsafe.use_netty_default_chunk_and_page_size"; static { + ByteBufAllocator allocator; if (Booleans.parseBoolean(System.getProperty(USE_NETTY_DEFAULT), false)) { - ALLOCATOR = ByteBufAllocator.DEFAULT; + allocator = ByteBufAllocator.DEFAULT; SUGGESTED_MAX_ALLOCATION_SIZE = 1024 * 1024; DESCRIPTION = "[name=netty_default, suggested_max_allocation_size=" + 
ByteSizeValue.ofBytes(SUGGESTED_MAX_ALLOCATION_SIZE) @@ -127,7 +130,12 @@ public class NettyAllocator { + g1gcRegionSize + "}]"; } - ALLOCATOR = new NoDirectBuffers(delegate); + allocator = new NoDirectBuffers(delegate); + } + if (Assertions.ENABLED) { + ALLOCATOR = new TrashingByteBufAllocator(allocator); + } else { + ALLOCATOR = allocator; } RECYCLER = new Recycler<>() { @@ -353,4 +361,105 @@ public ByteBufAllocator getDelegate() { return delegate; } } + + static class TrashingByteBuf extends WrappedByteBuf { + + private boolean trashed = false; + + protected TrashingByteBuf(ByteBuf buf) { + super(buf); + } + + @Override + public boolean release() { + if (refCnt() == 1) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(); + } + + @Override + public boolean release(int decrement) { + if (refCnt() == decrement && refCnt() > 0) { + // see [NOTE on racy trashContent() calls] + trashContent(); + } + return super.release(decrement); + } + + // [NOTE on racy trashContent() calls]: We trash the buffer content _before_ reducing the ref + // count to zero, which looks racy because in principle a concurrent caller could come along + // and successfully retain() this buffer to keep it alive after it's been trashed. Such a + // caller would sometimes get an IllegalReferenceCountException ofc but that's something it + // could handle - see for instance org.elasticsearch.transport.netty4.Netty4Utils.ByteBufRefCounted.tryIncRef. + // Yet in practice this should never happen, we only ever retain() these buffers while we + // know them to be alive (i.e. via RefCounted#mustIncRef or its moral equivalents) so it'd + // be a bug for a caller to retain() a buffer whose ref count is heading to zero and whose + // contents we've already decided to trash. 
+ private void trashContent() { + if (trashed == false) { + trashed = true; + TrashingByteBufAllocator.trashBuffer(buf); + } + } + } + + static class TrashingCompositeByteBuf extends CompositeByteBuf { + + TrashingCompositeByteBuf(ByteBufAllocator alloc, boolean direct, int maxNumComponents) { + super(alloc, direct, maxNumComponents); + } + + @Override + protected void deallocate() { + TrashingByteBufAllocator.trashBuffer(this); + super.deallocate(); + } + } + + static class TrashingByteBufAllocator extends NoDirectBuffers { + + static int DEFAULT_MAX_COMPONENTS = 16; + + static void trashBuffer(ByteBuf buf) { + for (var nioBuf : buf.nioBuffers()) { + if (nioBuf.hasArray()) { + var from = nioBuf.arrayOffset() + nioBuf.position(); + var to = from + nioBuf.remaining(); + Arrays.fill(nioBuf.array(), from, to, (byte) 0); + } + } + } + + TrashingByteBufAllocator(ByteBufAllocator delegate) { + super(delegate); + } + + @Override + public ByteBuf heapBuffer() { + return new TrashingByteBuf(super.heapBuffer()); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity)); + } + + @Override + public ByteBuf heapBuffer(int initialCapacity, int maxCapacity) { + return new TrashingByteBuf(super.heapBuffer(initialCapacity, maxCapacity)); + } + + @Override + public CompositeByteBuf compositeHeapBuffer() { + return new TrashingCompositeByteBuf(this, false, DEFAULT_MAX_COMPONENTS); + } + + @Override + public CompositeByteBuf compositeHeapBuffer(int maxNumComponents) { + return new TrashingCompositeByteBuf(this, false, maxNumComponents); + } + + } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java new file mode 100644 index 0000000000000..50841cec000f1 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/WrappedByteBuf.java @@ -0,0 +1,1036 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.ByteBufAllocator; +import io.netty.util.ByteProcessor; +import io.netty.util.internal.ObjectUtil; +import io.netty.util.internal.StringUtil; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.nio.channels.FileChannel; +import java.nio.channels.GatheringByteChannel; +import java.nio.channels.ScatteringByteChannel; +import java.nio.charset.Charset; + +/** + * A copy of Netty's WrappedByteBuf. 
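+ * Netty's own class is package-private, so it is copied here to let
+ * {@code TrashingByteBuf} in {@code NettyAllocator} extend it.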
+ */ +class WrappedByteBuf extends ByteBuf { + + protected final ByteBuf buf; + + protected WrappedByteBuf(ByteBuf buf) { + this.buf = ObjectUtil.checkNotNull(buf, "buf"); + } + + @Override + public final boolean hasMemoryAddress() { + return buf.hasMemoryAddress(); + } + + @Override + public boolean isContiguous() { + return buf.isContiguous(); + } + + @Override + public final long memoryAddress() { + return buf.memoryAddress(); + } + + @Override + public final int capacity() { + return buf.capacity(); + } + + @Override + public ByteBuf capacity(int newCapacity) { + buf.capacity(newCapacity); + return this; + } + + @Override + public final int maxCapacity() { + return buf.maxCapacity(); + } + + @Override + public final ByteBufAllocator alloc() { + return buf.alloc(); + } + + @Override + public final ByteOrder order() { + return buf.order(); + } + + @Override + public ByteBuf order(ByteOrder endianness) { + return buf.order(endianness); + } + + @Override + public final ByteBuf unwrap() { + return buf; + } + + @Override + public ByteBuf asReadOnly() { + return buf.asReadOnly(); + } + + @Override + public boolean isReadOnly() { + return buf.isReadOnly(); + } + + @Override + public final boolean isDirect() { + return buf.isDirect(); + } + + @Override + public final int readerIndex() { + return buf.readerIndex(); + } + + @Override + public final ByteBuf readerIndex(int readerIndex) { + buf.readerIndex(readerIndex); + return this; + } + + @Override + public final int writerIndex() { + return buf.writerIndex(); + } + + @Override + public final ByteBuf writerIndex(int writerIndex) { + buf.writerIndex(writerIndex); + return this; + } + + @Override + public ByteBuf setIndex(int readerIndex, int writerIndex) { + buf.setIndex(readerIndex, writerIndex); + return this; + } + + @Override + public final int readableBytes() { + return buf.readableBytes(); + } + + @Override + public final int writableBytes() { + return buf.writableBytes(); + } + + @Override + public final int maxWritableBytes() { + return buf.maxWritableBytes(); + } + + @Override + public int maxFastWritableBytes() { + return buf.maxFastWritableBytes(); + } + + @Override + public final boolean isReadable() { + return buf.isReadable(); + } + + @Override + public final boolean isWritable() { + return buf.isWritable(); + } + + @Override + public final ByteBuf clear() { + buf.clear(); + return this; + } + + @Override + public final ByteBuf markReaderIndex() { + buf.markReaderIndex(); + return this; + } + + @Override + public final ByteBuf resetReaderIndex() { + buf.resetReaderIndex(); + return this; + } + + @Override + public final ByteBuf markWriterIndex() { + buf.markWriterIndex(); + return this; + } + + @Override + public final ByteBuf resetWriterIndex() { + buf.resetWriterIndex(); + return this; + } + + @Override + public ByteBuf discardReadBytes() { + buf.discardReadBytes(); + return this; + } + + @Override + public ByteBuf discardSomeReadBytes() { + buf.discardSomeReadBytes(); + return this; + } + + @Override + public ByteBuf ensureWritable(int minWritableBytes) { + buf.ensureWritable(minWritableBytes); + return this; + } + + @Override + public int ensureWritable(int minWritableBytes, boolean force) { + return buf.ensureWritable(minWritableBytes, force); + } + + @Override + public boolean getBoolean(int index) { + return buf.getBoolean(index); + } + + @Override + public byte getByte(int index) { + return buf.getByte(index); + } + + @Override + public short getUnsignedByte(int index) { + return buf.getUnsignedByte(index); + } + + 
@Override + public short getShort(int index) { + return buf.getShort(index); + } + + @Override + public short getShortLE(int index) { + return buf.getShortLE(index); + } + + @Override + public int getUnsignedShort(int index) { + return buf.getUnsignedShort(index); + } + + @Override + public int getUnsignedShortLE(int index) { + return buf.getUnsignedShortLE(index); + } + + @Override + public int getMedium(int index) { + return buf.getMedium(index); + } + + @Override + public int getMediumLE(int index) { + return buf.getMediumLE(index); + } + + @Override + public int getUnsignedMedium(int index) { + return buf.getUnsignedMedium(index); + } + + @Override + public int getUnsignedMediumLE(int index) { + return buf.getUnsignedMediumLE(index); + } + + @Override + public int getInt(int index) { + return buf.getInt(index); + } + + @Override + public int getIntLE(int index) { + return buf.getIntLE(index); + } + + @Override + public long getUnsignedInt(int index) { + return buf.getUnsignedInt(index); + } + + @Override + public long getUnsignedIntLE(int index) { + return buf.getUnsignedIntLE(index); + } + + @Override + public long getLong(int index) { + return buf.getLong(index); + } + + @Override + public long getLongLE(int index) { + return buf.getLongLE(index); + } + + @Override + public char getChar(int index) { + return buf.getChar(index); + } + + @Override + public float getFloat(int index) { + return buf.getFloat(index); + } + + @Override + public double getDouble(int index) { + return buf.getDouble(index); + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int length) { + buf.getBytes(index, dst, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuf dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, byte[] dst, int dstIndex, int length) { + buf.getBytes(index, dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf getBytes(int index, ByteBuffer dst) { + buf.getBytes(index, dst); + return this; + } + + @Override + public ByteBuf getBytes(int index, OutputStream out, int length) throws IOException { + buf.getBytes(index, out, length); + return this; + } + + @Override + public int getBytes(int index, GatheringByteChannel out, int length) throws IOException { + return buf.getBytes(index, out, length); + } + + @Override + public int getBytes(int index, FileChannel out, long position, int length) throws IOException { + return buf.getBytes(index, out, position, length); + } + + @Override + public CharSequence getCharSequence(int index, int length, Charset charset) { + return buf.getCharSequence(index, length, charset); + } + + @Override + public ByteBuf setBoolean(int index, boolean value) { + buf.setBoolean(index, value); + return this; + } + + @Override + public ByteBuf setByte(int index, int value) { + buf.setByte(index, value); + return this; + } + + @Override + public ByteBuf setShort(int index, int value) { + buf.setShort(index, value); + return this; + } + + @Override + public ByteBuf setShortLE(int index, int value) { + buf.setShortLE(index, value); + return this; + } + + @Override + public ByteBuf setMedium(int index, int value) { + buf.setMedium(index, value); + return this; + } + + @Override + public 
ByteBuf setMediumLE(int index, int value) { + buf.setMediumLE(index, value); + return this; + } + + @Override + public ByteBuf setInt(int index, int value) { + buf.setInt(index, value); + return this; + } + + @Override + public ByteBuf setIntLE(int index, int value) { + buf.setIntLE(index, value); + return this; + } + + @Override + public ByteBuf setLong(int index, long value) { + buf.setLong(index, value); + return this; + } + + @Override + public ByteBuf setLongLE(int index, long value) { + buf.setLongLE(index, value); + return this; + } + + @Override + public ByteBuf setChar(int index, int value) { + buf.setChar(index, value); + return this; + } + + @Override + public ByteBuf setFloat(int index, float value) { + buf.setFloat(index, value); + return this; + } + + @Override + public ByteBuf setDouble(int index, double value) { + buf.setDouble(index, value); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int length) { + buf.setBytes(index, src, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuf src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src) { + buf.setBytes(index, src); + return this; + } + + @Override + public ByteBuf setBytes(int index, byte[] src, int srcIndex, int length) { + buf.setBytes(index, src, srcIndex, length); + return this; + } + + @Override + public ByteBuf setBytes(int index, ByteBuffer src) { + buf.setBytes(index, src); + return this; + } + + @Override + public int setBytes(int index, InputStream in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, ScatteringByteChannel in, int length) throws IOException { + return buf.setBytes(index, in, length); + } + + @Override + public int setBytes(int index, FileChannel in, long position, int length) throws IOException { + return buf.setBytes(index, in, position, length); + } + + @Override + public ByteBuf setZero(int index, int length) { + buf.setZero(index, length); + return this; + } + + @Override + public int setCharSequence(int index, CharSequence sequence, Charset charset) { + return buf.setCharSequence(index, sequence, charset); + } + + @Override + public boolean readBoolean() { + return buf.readBoolean(); + } + + @Override + public byte readByte() { + return buf.readByte(); + } + + @Override + public short readUnsignedByte() { + return buf.readUnsignedByte(); + } + + @Override + public short readShort() { + return buf.readShort(); + } + + @Override + public short readShortLE() { + return buf.readShortLE(); + } + + @Override + public int readUnsignedShort() { + return buf.readUnsignedShort(); + } + + @Override + public int readUnsignedShortLE() { + return buf.readUnsignedShortLE(); + } + + @Override + public int readMedium() { + return buf.readMedium(); + } + + @Override + public int readMediumLE() { + return buf.readMediumLE(); + } + + @Override + public int readUnsignedMedium() { + return buf.readUnsignedMedium(); + } + + @Override + public int readUnsignedMediumLE() { + return buf.readUnsignedMediumLE(); + } + + @Override + public int readInt() { + return buf.readInt(); + } + + @Override + public int readIntLE() { + return buf.readIntLE(); + } + + @Override + public long readUnsignedInt() { + return buf.readUnsignedInt(); + } + + @Override + public long 
readUnsignedIntLE() { + return buf.readUnsignedIntLE(); + } + + @Override + public long readLong() { + return buf.readLong(); + } + + @Override + public long readLongLE() { + return buf.readLongLE(); + } + + @Override + public char readChar() { + return buf.readChar(); + } + + @Override + public float readFloat() { + return buf.readFloat(); + } + + @Override + public double readDouble() { + return buf.readDouble(); + } + + @Override + public ByteBuf readBytes(int length) { + return buf.readBytes(length); + } + + @Override + public ByteBuf readSlice(int length) { + return buf.readSlice(length); + } + + @Override + public ByteBuf readRetainedSlice(int length) { + return buf.readRetainedSlice(length); + } + + @Override + public ByteBuf readBytes(ByteBuf dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int length) { + buf.readBytes(dst, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuf dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(byte[] dst, int dstIndex, int length) { + buf.readBytes(dst, dstIndex, length); + return this; + } + + @Override + public ByteBuf readBytes(ByteBuffer dst) { + buf.readBytes(dst); + return this; + } + + @Override + public ByteBuf readBytes(OutputStream out, int length) throws IOException { + buf.readBytes(out, length); + return this; + } + + @Override + public int readBytes(GatheringByteChannel out, int length) throws IOException { + return buf.readBytes(out, length); + } + + @Override + public int readBytes(FileChannel out, long position, int length) throws IOException { + return buf.readBytes(out, position, length); + } + + @Override + public CharSequence readCharSequence(int length, Charset charset) { + return buf.readCharSequence(length, charset); + } + + @Override + public ByteBuf skipBytes(int length) { + buf.skipBytes(length); + return this; + } + + @Override + public ByteBuf writeBoolean(boolean value) { + buf.writeBoolean(value); + return this; + } + + @Override + public ByteBuf writeByte(int value) { + buf.writeByte(value); + return this; + } + + @Override + public ByteBuf writeShort(int value) { + buf.writeShort(value); + return this; + } + + @Override + public ByteBuf writeShortLE(int value) { + buf.writeShortLE(value); + return this; + } + + @Override + public ByteBuf writeMedium(int value) { + buf.writeMedium(value); + return this; + } + + @Override + public ByteBuf writeMediumLE(int value) { + buf.writeMediumLE(value); + return this; + } + + @Override + public ByteBuf writeInt(int value) { + buf.writeInt(value); + return this; + } + + @Override + public ByteBuf writeIntLE(int value) { + buf.writeIntLE(value); + return this; + } + + @Override + public ByteBuf writeLong(long value) { + buf.writeLong(value); + return this; + } + + @Override + public ByteBuf writeLongLE(long value) { + buf.writeLongLE(value); + return this; + } + + @Override + public ByteBuf writeChar(int value) { + buf.writeChar(value); + return this; + } + + @Override + public ByteBuf writeFloat(float value) { + buf.writeFloat(value); + return this; + } + + @Override + public ByteBuf writeDouble(double value) { + buf.writeDouble(value); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int length) { + 
buf.writeBytes(src, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuf src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src) { + buf.writeBytes(src); + return this; + } + + @Override + public ByteBuf writeBytes(byte[] src, int srcIndex, int length) { + buf.writeBytes(src, srcIndex, length); + return this; + } + + @Override + public ByteBuf writeBytes(ByteBuffer src) { + buf.writeBytes(src); + return this; + } + + @Override + public int writeBytes(InputStream in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(ScatteringByteChannel in, int length) throws IOException { + return buf.writeBytes(in, length); + } + + @Override + public int writeBytes(FileChannel in, long position, int length) throws IOException { + return buf.writeBytes(in, position, length); + } + + @Override + public ByteBuf writeZero(int length) { + buf.writeZero(length); + return this; + } + + @Override + public int writeCharSequence(CharSequence sequence, Charset charset) { + return buf.writeCharSequence(sequence, charset); + } + + @Override + public int indexOf(int fromIndex, int toIndex, byte value) { + return buf.indexOf(fromIndex, toIndex, value); + } + + @Override + public int bytesBefore(byte value) { + return buf.bytesBefore(value); + } + + @Override + public int bytesBefore(int length, byte value) { + return buf.bytesBefore(length, value); + } + + @Override + public int bytesBefore(int index, int length, byte value) { + return buf.bytesBefore(index, length, value); + } + + @Override + public int forEachByte(ByteProcessor processor) { + return buf.forEachByte(processor); + } + + @Override + public int forEachByte(int index, int length, ByteProcessor processor) { + return buf.forEachByte(index, length, processor); + } + + @Override + public int forEachByteDesc(ByteProcessor processor) { + return buf.forEachByteDesc(processor); + } + + @Override + public int forEachByteDesc(int index, int length, ByteProcessor processor) { + return buf.forEachByteDesc(index, length, processor); + } + + @Override + public ByteBuf copy() { + return buf.copy(); + } + + @Override + public ByteBuf copy(int index, int length) { + return buf.copy(index, length); + } + + @Override + public ByteBuf slice() { + return buf.slice(); + } + + @Override + public ByteBuf retainedSlice() { + return buf.retainedSlice(); + } + + @Override + public ByteBuf slice(int index, int length) { + return buf.slice(index, length); + } + + @Override + public ByteBuf retainedSlice(int index, int length) { + return buf.retainedSlice(index, length); + } + + @Override + public ByteBuf duplicate() { + return buf.duplicate(); + } + + @Override + public ByteBuf retainedDuplicate() { + return buf.retainedDuplicate(); + } + + @Override + public int nioBufferCount() { + return buf.nioBufferCount(); + } + + @Override + public ByteBuffer nioBuffer() { + return buf.nioBuffer(); + } + + @Override + public ByteBuffer nioBuffer(int index, int length) { + return buf.nioBuffer(index, length); + } + + @Override + public ByteBuffer[] nioBuffers() { + return buf.nioBuffers(); + } + + @Override + public ByteBuffer[] nioBuffers(int index, int length) { + return buf.nioBuffers(index, length); + } + + @Override + public ByteBuffer internalNioBuffer(int index, int length) { + return buf.internalNioBuffer(index, length); + } + + @Override + public boolean hasArray() { + return buf.hasArray(); + } + + @Override + 
public byte[] array() { + return buf.array(); + } + + @Override + public int arrayOffset() { + return buf.arrayOffset(); + } + + @Override + public String toString(Charset charset) { + return buf.toString(charset); + } + + @Override + public String toString(int index, int length, Charset charset) { + return buf.toString(index, length, charset); + } + + @Override + public int hashCode() { + return buf.hashCode(); + } + + @Override + @SuppressWarnings("EqualsWhichDoesntCheckParameterClass") + public boolean equals(Object obj) { + return buf.equals(obj); + } + + @Override + public int compareTo(ByteBuf buffer) { + return buf.compareTo(buffer); + } + + @Override + public String toString() { + return StringUtil.simpleClassName(this) + '(' + buf.toString() + ')'; + } + + @Override + public ByteBuf retain(int increment) { + buf.retain(increment); + return this; + } + + @Override + public ByteBuf retain() { + buf.retain(); + return this; + } + + @Override + public ByteBuf touch() { + buf.touch(); + return this; + } + + @Override + public ByteBuf touch(Object hint) { + buf.touch(hint); + return this; + } + + @Override + public final boolean isReadable(int size) { + return buf.isReadable(size); + } + + @Override + public final boolean isWritable(int size) { + return buf.isWritable(size); + } + + @Override + public final int refCnt() { + return buf.refCnt(); + } + + @Override + public boolean release() { + return buf.release(); + } + + @Override + public boolean release(int decrement) { + return buf.release(decrement); + } + +} diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java new file mode 100644 index 0000000000000..a76eb9fa4875b --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/NettyAllocatorTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.transport.netty4; + +import io.netty.buffer.ByteBufAllocator; +import io.netty.buffer.Unpooled; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.List; + +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBuf; +import static org.elasticsearch.transport.netty4.NettyAllocator.TrashingByteBufAllocator; + +public class NettyAllocatorTests extends ESTestCase { + + static void assertBufferTrashed(BytesReference bytesRef) throws IOException { + var iter = bytesRef.iterator(); + BytesRef br; + while ((br = iter.next()) != null) { + for (var i = br.offset; i < br.offset + br.length; i++) { + assertEquals("off=" + br.offset + " len=" + br.length + " i=" + i, 0, br.bytes[i]); + } + } + } + + public void testTrashArrayByteBuf() { + var arr = randomByteArrayOfLength(between(1024, 2048)); + var buf = Unpooled.wrappedBuffer(arr); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + var emptyArr = new byte[arr.length]; + assertArrayEquals(emptyArr, arr); + } + + public void testNioBufsTrashingByteBuf() { + var arrCnt = between(1, 16); + var byteArrs = new byte[arrCnt][]; + var byteBufs = new ByteBuffer[arrCnt]; + for (var i = 0; i < arrCnt; i++) { + byteArrs[i] = randomByteArrayOfLength(between(1024, 2048)); + byteBufs[i] = ByteBuffer.wrap(byteArrs[i]); + } + var buf = Unpooled.wrappedBuffer(byteBufs); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + for (int i = 0; i < arrCnt; i++) { + for (int j = 0; j < byteArrs[i].length; j++) { + assertEquals(0, byteArrs[i][j]); + } + } + } + + public void testNioBufOffsetTrashingByteBuf() { + var arr = randomByteArrayOfLength(1024); + var off = 1; + var len = arr.length - 2; + arr[0] = 1; + arr[arr.length - 1] = 1; + var buf = Unpooled.wrappedBuffer(arr, off, len); + var tBuf = new TrashingByteBuf(buf); + tBuf.release(); + assertEquals(1, arr[0]); + assertEquals(1, arr[arr.length - 1]); + for (int i = 1; i < arr.length - 1; i++) { + assertEquals("at index " + i, 0, arr[i]); + } + } + + public void testTrashingByteBufAllocator() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var size = between(1024 * 1024, 10 * 1024 * 1024); + + // use 3 different heap allocation methods + for (var buf : List.of(alloc.heapBuffer(), alloc.heapBuffer(1024), alloc.heapBuffer(1024, size))) { + buf.writeBytes(randomByteArrayOfLength(size)); + var bytesRef = Netty4Utils.toBytesReference(buf); + buf.release(); + assertBufferTrashed(bytesRef); + } + } + + public void testTrashingCompositeByteBuf() throws IOException { + var alloc = new TrashingByteBufAllocator(ByteBufAllocator.DEFAULT); + var compBuf = alloc.compositeHeapBuffer(); + for (var i = 0; i < between(1, 10); i++) { + var buf = alloc.heapBuffer().writeBytes(randomByteArrayOfLength(between(1024, 8192))); + compBuf.addComponent(true, buf); + } + var bytesRef = Netty4Utils.toBytesReference(compBuf); + compBuf.release(); + assertBufferTrashed(bytesRef); + } + +} diff --git a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java index ddcfc1ea7eed8..51e6512072e41 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/BytesReference.java @@ -74,6 +74,29 @@ static ByteBuffer[] 
toByteBuffers(BytesReference reference) { } } + /** + * Allocates a new buffer and copies the bytes from the given BytesReference. + * + * @deprecated copying bytes is a likely source of performance regressions and unnecessary allocations. + * This method exists to serve very few places that struggle to handle reference counted buffers. + */ + @Deprecated(forRemoval = true) + static BytesReference copyBytes(BytesReference bytesReference) { + byte[] arr = new byte[bytesReference.length()]; + int offset = 0; + final BytesRefIterator iterator = bytesReference.iterator(); + try { + BytesRef slice; + while ((slice = iterator.next()) != null) { + System.arraycopy(slice.bytes, slice.offset, arr, offset, slice.length); + offset += slice.length; + } + return new BytesArray(arr); + } catch (IOException e) { + throw new AssertionError(e); + } + } + /** * Returns BytesReference composed of the provided ByteBuffers. */ diff --git a/server/src/main/java/org/elasticsearch/http/HttpBody.java b/server/src/main/java/org/elasticsearch/http/HttpBody.java index a10487502ed3c..6571125677fab 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpBody.java +++ b/server/src/main/java/org/elasticsearch/http/HttpBody.java @@ -9,7 +9,6 @@ package org.elasticsearch.http; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.core.Nullable; @@ -21,11 +20,11 @@ public sealed interface HttpBody extends Releasable permits HttpBody.Full, HttpBody.Stream { static Full fromBytesReference(BytesReference bytesRef) { - return new ByteRefHttpBody(bytesRef); + return new ByteRefHttpBody(ReleasableBytesReference.wrap(bytesRef)); } static Full empty() { - return new ByteRefHttpBody(BytesArray.EMPTY); + return new ByteRefHttpBody(ReleasableBytesReference.empty()); } default boolean isFull() { @@ -56,7 +55,7 @@ default Stream asStream() { * Full content represents a complete http body content that can be accessed immediately. */ non-sealed interface Full extends HttpBody { - BytesReference bytes(); + ReleasableBytesReference bytes(); @Override default void close() {} @@ -114,5 +113,5 @@ interface ChunkHandler extends Releasable { default void close() {} } - record ByteRefHttpBody(BytesReference bytes) implements Full {} + record ByteRefHttpBody(ReleasableBytesReference bytes) implements Full {} } diff --git a/server/src/main/java/org/elasticsearch/http/HttpRequest.java b/server/src/main/java/org/elasticsearch/http/HttpRequest.java index ca6e51f2cec08..b4b1bb84433c9 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpRequest.java +++ b/server/src/main/java/org/elasticsearch/http/HttpRequest.java @@ -52,10 +52,4 @@ enum HttpVersion { */ void release(); - /** - * If this instances uses any pooled resources, creates a copy of this instance that does not use any pooled resources and releases - any resources associated with this instance. If the instance does not use any shared resources, returns itself.
- * @return a safe unpooled http request - */ - HttpRequest releaseAndCopy(); } diff --git a/server/src/main/java/org/elasticsearch/http/HttpTracer.java b/server/src/main/java/org/elasticsearch/http/HttpTracer.java index 3d8360e6ee3fa..d6daf11c0539a 100644 --- a/server/src/main/java/org/elasticsearch/http/HttpTracer.java +++ b/server/src/main/java/org/elasticsearch/http/HttpTracer.java @@ -94,7 +94,7 @@ HttpTracer maybeLogRequest(RestRequest restRequest, @Nullable Exception e) { private void logFullContent(RestRequest restRequest) { try (var stream = HttpBodyTracer.getBodyOutputStream(restRequest.getRequestId(), HttpBodyTracer.Type.REQUEST)) { - restRequest.content().writeTo(stream); + restRequest.releasableContent().writeTo(stream); } catch (Exception e2) { assert false : e2; // no real IO here } diff --git a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java index f1b59ed14cefb..4564a37dacf4a 100644 --- a/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/BaseRestHandler.java @@ -122,6 +122,7 @@ public final void handleRequest(RestRequest request, RestChannel channel, NodeCl ); } + usageCount.increment(); if (request.isStreamedContent()) { assert action instanceof RequestBodyChunkConsumer; var chunkConsumer = (RequestBodyChunkConsumer) action; @@ -137,11 +138,11 @@ public void close() { chunkConsumer.streamClose(); } }); + action.accept(channel); + } else { + action.accept(channel); + request.getHttpRequest().release(); } - - usageCount.increment(); - // execute the action - action.accept(channel); } } diff --git a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java index cb5155cb0de0b..21a44ac9af5c8 100644 --- a/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/FilterRestHandler.java @@ -43,11 +43,6 @@ public boolean canTripCircuitBreaker() { return delegate.canTripCircuitBreaker(); } - @Override - public boolean allowsUnsafeBuffers() { - return delegate.allowsUnsafeBuffers(); - } - @Override public boolean supportsBulkContent() { return delegate.supportsBulkContent(); diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 7446ec5bb6717..49fe794bbe615 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -432,10 +432,6 @@ private void dispatchRequest( } // iff we could reserve bytes for the request we need to send the response also over this channel responseChannel = new ResourceHandlingHttpChannel(channel, circuitBreakerService, contentLength, methodHandlers); - // TODO: Count requests double in the circuit breaker if they need copying? 
- if (handler.allowsUnsafeBuffers() == false) { - request.ensureSafeBuffers(); - } if (handler.allowSystemIndexAccessByDefault() == false) { // The ELASTIC_PRODUCT_ORIGIN_HTTP_HEADER indicates that the request is coming from an Elastic product and diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index cf66e402d3691..572e92e369a63 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -69,18 +69,6 @@ default Scope getServerlessScope() { return serverlessScope == null ? null : serverlessScope.value(); } - /** - * Indicates if the RestHandler supports working with pooled buffers. If the request handler will not escape the return - * {@link RestRequest#content()} or any buffers extracted from it then there is no need to make a copies of any pooled buffers in the - * {@link RestRequest} instance before passing a request to this handler. If this instance does not support pooled/unsafe buffers - * {@link RestRequest#ensureSafeBuffers()} should be called on any request before passing it to {@link #handleRequest}. - * - * @return true iff the handler supports requests that make use of pooled buffers - */ - default boolean allowsUnsafeBuffers() { - return false; - } - /** * The list of {@link Route}s that this RestHandler is responsible for handling. */ diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 17eda305b5ccf..17d85a8eabb1c 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -16,17 +16,21 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RestApiVersion; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpBody; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.ToXContent; @@ -51,6 +55,8 @@ public class RestRequest implements ToXContent.Params, Traceable { + private static final Logger logger = LogManager.getLogger(RestRequest.class); + /** * Internal marker request parameter to indicate that a request was made in serverless mode. Use this parameter, together with * {@link #OPERATOR_REQUEST} if you need to toggle behavior for serverless, for example to enforce partial API restrictions @@ -187,15 +193,6 @@ protected RestRequest(RestRequest other) { } } - /** - * Invoke {@link HttpRequest#releaseAndCopy()} on the http request in this instance and replace a pooled http request - * with an unpooled copy. 
This is supposed to be used before passing requests to {@link RestHandler} instances that can not safely - handle http requests that use pooled buffers as determined by {@link RestHandler#allowsUnsafeBuffers()}. - */ - void ensureSafeBuffers() { - httpRequest = httpRequest.releaseAndCopy(); - } - /** * Creates a new REST request. * @@ -306,9 +303,31 @@ public boolean isFullContent() { return httpRequest.body().isFull(); } + /** + * Returns a copy of the HTTP content. The copy is GC-managed and does not require reference counting. + * Please use {@link #releasableContent()} to avoid copying the content. + */ + @SuppressForbidden(reason = "temporarily support content copy while migrating RestHandlers to ref counted pooled buffers") public BytesReference content() { + return BytesReference.copyBytes(releasableContent()); + } + + /** + * Returns a direct reference to the network buffer containing the request body. The HTTP layers will release their references to this + * buffer as soon as they have finished the synchronous steps of processing the request on the network thread, which will by default + * release the buffer back to the pool where it may be re-used for another request. If you need to keep the buffer alive past the end of + * these synchronous steps, acquire your own reference to this buffer and release it once it's no longer needed. + */ + public ReleasableBytesReference releasableContent() { this.contentConsumed = true; - return httpRequest.body().asFull().bytes(); + var bytes = httpRequest.body().asFull().bytes(); + if (bytes.hasReferences() == false) { + var e = new IllegalStateException("http releasable content accessed after release"); + logger.error(e.getMessage(), e); + assert false : e; + throw e; + } + return bytes; } public boolean isStreamedContent() { @@ -319,18 +338,32 @@ public HttpBody.Stream contentStream() { return httpRequest.body().asStream(); } - /** - * @return content of the request body or throw an exception if the body or content type is missing - */ - public final BytesReference requiredContent() { + private void ensureContent() { if (hasContent() == false) { throw new ElasticsearchParseException("request body is required"); } else if (xContentType.get() == null) { throwValidationException("unknown content type"); } + } + + /** + * @return a copy of the request body; throws an exception if the body or content type is missing. + * See {@link #content()}. Please use {@link #requiredReleasableContent()} to avoid copying the content. + */ + public final BytesReference requiredContent() { + ensureContent(); return content(); } + /** + * Returns a reference to the network buffer of the HTTP content, or throws an exception if the body or content type is missing. + * See {@link #releasableContent()}. This is the recommended way to handle HTTP content without copying it.
+ */ + public ReleasableBytesReference requiredReleasableContent() { + ensureContent(); + return releasableContent(); + } + private static void throwValidationException(String msg) { ValidationException unknownContentType = new ValidationException(); unknownContentType.addValidationError(msg); diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java index e4105363e1bce..57b4d2990c8e0 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequestFilter.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Tuple; @@ -44,10 +45,10 @@ public boolean hasContent() { } @Override - public BytesReference content() { + public ReleasableBytesReference releasableContent() { if (filteredBytes == null) { Tuple<XContentType, Map<String, Object>> result = XContentHelper.convertToMap( - restRequest.requiredContent(), + restRequest.requiredReleasableContent(), true, restRequest.getXContentType() ); @@ -63,7 +64,7 @@ public BytesReference content() { throw new ElasticsearchException("failed to parse request", e); } } - return filteredBytes; + return ReleasableBytesReference.wrap(filteredBytes); } }; } else { diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java index de3fd390ec86d..9428ef5390b2f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestBulkAction.java @@ -10,6 +10,7 @@ package org.elasticsearch.rest.action.document; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkRequestParser; @@ -102,9 +103,11 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC boolean defaultRequireDataStream = request.paramAsBoolean(DocWriteRequest.REQUIRE_DATA_STREAM, false); bulkRequest.timeout(request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT)); bulkRequest.setRefreshPolicy(request.param("refresh")); + ReleasableBytesReference content = request.requiredReleasableContent(); + try { bulkRequest.add( - request.requiredContent(), + content, defaultIndex, defaultRouting, defaultFetchSourceContext, @@ -119,8 +122,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } catch (Exception e) { return channel -> new RestToXContentListener<>(channel).onFailure(parseFailureException(e)); } - - return channel -> client.bulk(bulkRequest, new RestRefCountedChunkedToXContentListener<>(channel)); + return channel -> { + content.mustIncRef(); + client.bulk(bulkRequest, ActionListener.releaseAfter(new RestRefCountedChunkedToXContentListener<>(channel), content)); + }; } else { String waitForActiveShards = request.param("wait_for_active_shards"); TimeValue timeout = request.paramAsTime("timeout", BulkShardRequest.DEFAULT_TIMEOUT); @@ -270,11 +275,6 @@ public boolean supportsBulkContent()
{ return true; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } - @Override public Set<String> supportedCapabilities() { return capabilities; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index c2437dcb96fa6..d81ac03492d59 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -9,12 +9,14 @@ package org.elasticsearch.rest.action.document; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.common.bytes.ReleasableBytesReference; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -104,11 +106,12 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + ReleasableBytesReference source = request.requiredReleasableContent(); IndexRequest indexRequest = new IndexRequest(request.param("index")); indexRequest.id(request.param("id")); indexRequest.routing(request.param("routing")); indexRequest.setPipeline(request.param("pipeline")); - indexRequest.source(request.requiredContent(), request.getXContentType()); + indexRequest.source(source, request.getXContentType()); indexRequest.timeout(request.paramAsTime("timeout", IndexRequest.DEFAULT_TIMEOUT)); indexRequest.setRefreshPolicy(request.param("refresh")); indexRequest.version(RestActions.parseVersion(request)); @@ -126,10 +129,16 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC indexRequest.opType(sOpType); } - return channel -> client.index( - indexRequest, - new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())) - ); + return channel -> { + source.mustIncRef(); + client.index( + indexRequest, + ActionListener.releaseAfter( + new RestToXContentListener<>(channel, DocWriteResponse::status, r -> r.getLocation(indexRequest.routing())), + source + ) + ); + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 80a85d3b9b748..ff062084a3cbb 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -423,8 +423,4 @@ protected Set<String> responseParams() { return RESPONSE_PARAMS; } - @Override - public boolean allowsUnsafeBuffers() { - return true; - } } diff --git a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java index ad298e7aa8307..3fd8535cd5c27 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java +++ b/server/src/test/java/org/elasticsearch/common/bytes/BytesArrayTests.java @@ -107,4 +107,10 @@ public void testGetDoubleLE() { Exception e =
expectThrows(ArrayIndexOutOfBoundsException.class, () -> ref.getDoubleLE(9)); assertThat(e.getMessage(), equalTo("Index 9 out of bounds for length 9")); } + + public void testCopyBytes() { + var data = randomByteArrayOfLength(between(1024, 1024 * 1024 * 50)); + var copy = BytesReference.copyBytes(new BytesArray(data)); + assertArrayEquals(data, BytesReference.toBytes(copy)); + } } diff --git a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java index 8cd61453a3391..27dc0be673abb 100644 --- a/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java +++ b/server/src/test/java/org/elasticsearch/http/TestHttpRequest.java @@ -85,11 +85,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return null; diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index b7d38f6f299c7..2fdb3daa26da4 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -906,11 +906,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java index 9ddcf39d24d98..0c466b9162eb8 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/FakeRestRequest.java @@ -138,11 +138,6 @@ public HttpResponse createResponse(RestStatus status, ChunkedRestResponseBodyPar @Override public void release() {} - @Override - public HttpRequest releaseAndCopy() { - return this; - } - @Override public Exception getInboundException() { return inboundException; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java index aa200f7ae9acb..214f9150dfcc5 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearchBaseRestHandler.java @@ -32,7 +32,7 @@ protected final BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest r // We need to consume parameters and content from the REST request in order to bypass unrecognized param errors // and return a license error. 
request.params().keySet().forEach(key -> request.param(key, "")); - request.content(); + request.releasableContent(); return channel -> channel.sendResponse( new RestResponse(channel, LicenseUtils.newComplianceException(this.licenseState, this.product)) ); diff --git a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java index a9992e168bc66..2ea56b147bf9c 100644 --- a/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java +++ b/x-pack/plugin/logstash/src/main/java/org/elasticsearch/xpack/logstash/rest/RestPutPipelineAction.java @@ -49,7 +49,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } return restChannel -> { - final String content = request.content().utf8ToString(); + final String content = request.releasableContent().utf8ToString(); client.execute( PutPipelineAction.INSTANCE, new PutPipelineRequest(id, content, request.getXContentType()), diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java index 13e3e40887d89..429b632cdac18 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/AuditUtil.java @@ -27,10 +27,11 @@ public class AuditUtil { public static String restRequestContent(RestRequest request) { if (request.hasContent()) { + var content = request.releasableContent(); try { - return XContentHelper.convertToJson(request.content(), false, false, request.getXContentType()); + return XContentHelper.convertToJson(content, false, false, request.getXContentType()); } catch (IOException ioe) { - return "Invalid Format: " + request.content().utf8ToString(); + return "Invalid Format: " + content.utf8ToString(); } } return ""; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java index f0405e42f1f22..df21f5d4eeb0b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/SecurityBaseRestHandler.java @@ -75,7 +75,7 @@ protected final RestChannelConsumer prepareRequest(RestRequest request, NodeClie return innerPrepareRequest(request, client); } else { request.params().keySet().forEach(key -> request.param(key, "")); - request.content(); + request.releasableContent(); // mark content consumed return channel -> channel.sendResponse(new RestResponse(channel, failedFeature)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 5adc1e351931d..3be40c280874d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -2614,7 +2614,7 @@ public void testAuthenticationSuccessRest() throws 
Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content())) { + if (includeRequestBody && request.hasContent()) { checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { @@ -2643,8 +2643,8 @@ public void testAuthenticationSuccessRest() throws Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content())) { - checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.getHttpRequest().body().asFull().bytes().utf8ToString()); + if (includeRequestBody && request.hasContent()) { + checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { checkedFields.put(LoggingAuditTrail.URL_QUERY_FIELD_NAME, "foo=bar&evac=true"); @@ -2672,7 +2672,7 @@ public void testAuthenticationSuccessRest() throws Exception { checkedFields.put(LoggingAuditTrail.REQUEST_METHOD_FIELD_NAME, request.method().toString()); checkedFields.put(LoggingAuditTrail.REQUEST_ID_FIELD_NAME, requestId); checkedFields.put(LoggingAuditTrail.URL_PATH_FIELD_NAME, "_uri"); - if (includeRequestBody && Strings.hasLength(request.content().utf8ToString())) { + if (includeRequestBody && request.hasContent()) { checkedFields.put(LoggingAuditTrail.REQUEST_BODY_FIELD_NAME, request.content().utf8ToString()); } if (params.isEmpty() == false) { From b378a1bb54650247c867329f6bf3265918a89fa0 Mon Sep 17 00:00:00 2001 From: elasticsearchmachine Date: Wed, 20 Nov 2024 22:09:17 +0000 Subject: [PATCH 12/50] Bump 8.x to 8.18.0 --- .backportrc.json | 4 +- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 16 + .buildkite/pipelines/periodic.yml | 23 +- .ci/bwcVersions | 1 + .ci/snapshotBwcVersions | 1 + .../reference/migration/migrate_8_18.asciidoc | 20 + docs/reference/migration/migrate_9_0.asciidoc | 548 ++++++------ docs/reference/release-notes/8.18.0.asciidoc | 8 + docs/reference/release-notes/9.0.0.asciidoc | 812 ++++++------------ .../release-notes/highlights.asciidoc | 168 +--- 11 files changed, 619 insertions(+), 984 deletions(-) create mode 100644 docs/reference/migration/migrate_8_18.asciidoc create mode 100644 docs/reference/release-notes/8.18.0.asciidoc diff --git a/.backportrc.json b/.backportrc.json index 03f3f892f9227..20287f0bfc0e6 100644 --- a/.backportrc.json +++ b/.backportrc.json @@ -1,10 +1,10 @@ { "upstream" : "elastic/elasticsearch", - "targetBranchChoices" : [ "main", "8.x", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], + "targetBranchChoices" : [ "main", "8.x", "8.17", "8.16", "8.15", "8.14", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ], "targetPRLabels" : [ "backport" ], "branchLabelMapping" : { "^v9.0.0$" : "main", - "^v8.17.0$" : "8.x", + "^v8.18.0$" : "8.x", "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2" } } diff --git a/.buildkite/pipelines/intake.yml 
b/.buildkite/pipelines/intake.yml index 19e99852869e6..8935872fdec83 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 7dd8269f4ffe6..2dbb7f5193af6 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -320,6 +320,22 @@ steps: env: BWC_VERSION: 8.17.0 + - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.18.0 + - label: "{{matrix.image}} / 9.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v9.0.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 79371d6ddccf5..047e4a3f4f8f6 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -344,6 +344,25 @@ steps: - signal_reason: agent_stop limit: 3 + - label: 8.18.0 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.18.0#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + preemptible: true + env: + BWC_VERSION: 8.18.0 + retry: + automatic: + - exit_status: "-1" + limit: 3 + signal_reason: none + - signal_reason: agent_stop + limit: 3 + - label: 9.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v9.0.0#bwcTest timeout_in_minutes: 300 @@ -429,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -471,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.1", "8.17.0", "9.0.0"] + BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 85522e47a523f..ac07e14c2a176 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -17,4 +17,5 @@ BWC_VERSION: - "8.15.4" - "8.16.1" - "8.17.0" + - "8.18.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 9ea3072021bb3..351c605e6e092 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,4 +1,5 @@ BWC_VERSION: - "8.16.1" - "8.17.0" + - "8.18.0" - "9.0.0" diff --git a/docs/reference/migration/migrate_8_18.asciidoc b/docs/reference/migration/migrate_8_18.asciidoc new file mode 100644 index 0000000000000..c989ff9f85b6d --- /dev/null +++ b/docs/reference/migration/migrate_8_18.asciidoc @@ -0,0 +1,20 @@ +[[migrating-8.18]] +== Migrating to 8.18 +++++ +8.18 +++++ + +This section discusses the changes that you need to be aware of when migrating +your application to {es} 8.18. + +See also <<release-highlights>> and <<es-release-notes>>.
+ +coming::[8.18.0] + + +[discrete] +[[breaking-changes-8.18]] +=== Breaking changes + +There are no breaking changes in {es} 8.18. + diff --git a/docs/reference/migration/migrate_9_0.asciidoc b/docs/reference/migration/migrate_9_0.asciidoc index 6569647fd993e..5048220966bba 100644 --- a/docs/reference/migration/migrate_9_0.asciidoc +++ b/docs/reference/migration/migrate_9_0.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[migrating-9.0]] == Migrating to 9.0 ++++ @@ -23,204 +20,229 @@ The following changes in {es} 9.0 might affect your applications and prevent them from operating normally. Before upgrading to 9.0, review these changes and take the described steps to mitigate the impact. -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[set_lenient_to_true_by_default_when_using_updateable_synonyms]] -// .Set lenient to true by default when using updateable synonyms -// [%collapsible] -// ==== -// *Details* + -// When a `synonym` or `synonym_graph` token filter is configured with `updateable: true`, the default `lenient` -// value will now be `true`. -// -// *Impact* + -// `synonym` or `synonym_graph` token filters configured with `updateable: true` will ignore invalid synonyms by -// default. This prevents shard initialization errors on invalid synonyms. -// ==== -// -// [discrete] -// [[breaking_90_mapping_changes]] -// ==== Mapping changes -// -// [[jdk_locale_database_change]] -// .JDK locale database change -// [%collapsible] -// ==== -// *Details* + -// {es} 8.16 changes the version of the JDK that is included from version 22 to version 23. This changes the locale database that is used by Elasticsearch from the COMPAT database to the CLDR database. This change can cause significant differences to the textual date formats accepted by Elasticsearch, and to calculated week-dates. -// -// If you run {es} 8.16 on JDK version 22 or below, it will use the COMPAT locale database to match the behavior of 8.15. However, starting with {es} 9.0, {es} will use the CLDR database regardless of JDK version it is run on. -// -// *Impact* + -// This affects you if you use custom date formats using textual or week-date field specifiers. If you use date fields or calculated week-dates that change between the COMPAT and CLDR databases, then this change will cause Elasticsearch to reject previously valid date fields as invalid data. You might need to modify your ingest or output integration code to account for the differences between these two JDK versions. -// -// Starting in version 8.15.2, Elasticsearch will log deprecation warnings if you are using date format specifiers that might change on upgrading to JDK 23. These warnings are visible in Kibana. -// -// For detailed guidance, refer to <> and the https://ela.st/jdk-23-locales[Elastic blog]. -// ==== -// -// [discrete] -// [[breaking_90_analysis_changes]] -// ==== Analysis changes -// -// [[snowball_stemmers_have_been_upgraded]] -// .Snowball stemmers have been upgraded -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. Users using Snowball stemmers that are experiencing changes in search behaviour on existing data are advised to reindex. 
-// -// *Impact* + -// The upgrade should generally provide improved stemming results. Small changes in token analysis can lead to mismatches with previously index data, so existing indices using Snowball stemmers as part of their analysis chain should be reindexed. -// ==== -// -// [[german2_snowball_stemmer_an_alias_for_german_stemmer]] -// .The "german2" snowball stemmer is now an alias for the "german" stemmer -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may results in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü" etc...) -// -// *Impact* + -// Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible. -// ==== -// -// [[persian_analyzer_has_stemmer_by_default]] -// .The 'persian' analyzer has stemmer by default -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch exposes as 'persian' analyzer. Existing indices will keep the old non-stemming behaviour while new indices will see the updated behaviour with added stemming. Users that wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users that wish to use the new stemming behaviour for existing indices will have to reindex their data. -// -// *Impact* + -// Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check if this impacts their search results. If they wish to maintain the legacy non-stemming behaviour they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. -// ==== -// -// [[korean_dictionary_for_nori_has_been_updated]] -// .The Korean dictionary for Nori has been updated -// [%collapsible] -// ==== -// *Details* + -// Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex. -// -// *Impact* + -// The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed though. -// ==== -// -// [discrete] -// [[breaking_90_cluster_and_node_setting_changes]] -// ==== Cluster and node setting changes -// -// [[remove_unsupported_legacy_value_for_discovery_type]] -// .Remove unsupported legacy value for `discovery.type` -// [%collapsible] -// ==== -// *Details* + -// Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`. -// -// *Impact* + -// Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file. -// ==== -// -// [discrete] -// [[breaking_90_es_ql_changes]] -// ==== ES|QL changes -// -// [[esql_entirely_remove_meta_functions]] -// .ESQL: Entirely remove META FUNCTIONS -// [%collapsible] -// ==== -// *Details* + -// Removes an undocumented syntax from ESQL: META FUNCTION. This was never -// reliable or really useful. 
Consult the documentation instead. -// -// *Impact* + -// Removes an undocumented syntax from ESQL: META FUNCTION -// ==== -// -// [discrete] -// [[breaking_90_rest_api_changes]] -// ==== REST API changes -// -// [[remove_cluster_state_from_cluster_reroute_response]] -// .Remove cluster state from `/_cluster/reroute` response -// [%collapsible] -// ==== -// *Details* + -// The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version. -// -// *Impact* + -// Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API. -// ==== -// -// [[remove_deprecated_local_attribute_from_alias_apis]] -// .Remove deprecated local attribute from alias APIs -// [%collapsible] -// ==== -// *Details* + -// The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12. -// -// *Impact* + -// Cease usage of the `?local` query parameter when calling the listed APIs. -// ==== -// -// [[reworking_rrf_retriever_to_be_evaluated_during_rewrite_phase]] -// .Reworking RRF retriever to be evaluated during rewrite phase -// [%collapsible] -// ==== -// *Details* + -// In this release (8.16), we have introduced major changes to the retrievers framework -// and how they can be evaluated, focusing mainly on compound retrievers -// like `rrf` and `text_similarity_reranker`, which allowed us to support full -// composability (i.e. any retriever can be nested under any compound retriever), -// as well as supporting additional search features like collapsing, explaining, -// aggregations, and highlighting. -// -// To ensure consistency, and given that this rework is not available until 8.16, -// `rrf` and `text_similarity_reranker` retriever queries would now -// throw an exception in a mixed cluster scenario, where there are nodes -// both in current or later (i.e. >= 8.16) and previous ( <= 8.15) versions. -// -// As part of the rework, we have also removed the `_rank` property from -// the responses of an `rrf` retriever. -// -// *Impact* + -// - Users will not be able to use the `rrf` and `text_similarity_reranker` retrievers in a mixed cluster scenario -// with previous releases (i.e. prior to 8.16), and the request will throw an `IllegalArgumentException`. -// - `_rank` has now been removed from the output of the `rrf` retrievers so trying to directly parse the field -// will throw an exception -// ==== -// -// [[update_data_stream_lifecycle_telemetry_to_track_global_retention]] -// .Update data stream lifecycle telemetry to track global retention -// [%collapsible] -// ==== -// *Details* + -// In this release we introduced global retention settings that fulfil the following criteria: -// -// - a data stream managed by the data stream lifecycle, -// - a data stream that is not an internal data stream. -// -// As a result, we defined different types of retention: -// -// - **data retention**: the retention configured on data stream level by the data stream user or owner -// - **default global retention:** the retention configured by an admin on a cluster level and applied to any -// data stream that doesn't have data retention and fulfils the criteria. 
-// - **max global retention:** the retention configured by an admin to guard against having long retention periods.
-// Any data stream that fulfills the criteria will adhere to the data retention unless it exceeds the max retention,
-// in which case the max global retention applies.
-// - **effective retention:** the retention that applies on the data stream that fulfill the criteria at a given moment
-// in time. It takes into consideration all the retention above and resolves it to the retention that will take effect.
-//
-// Considering the above changes, having a field named `retention` in the usage API was confusing. For this reason, we
-// renamed it to `data_retention` and added telemetry about the other configurations too.
-//
-// *Impact* +
-// Users that use the field `data_lifecycle.retention` should use the `data_lifecycle.data_retention`
-// ====
+
+
+There are no notable breaking changes in {es} 9.0.
+But there are some less critical breaking changes.
+
+[discrete]
+[[breaking_90_analysis_changes]]
+==== Analysis changes
+
+[[snowball_stemmers_have_been_upgraded]]
+.Snowball stemmers have been upgraded
+[%collapsible]
+====
+*Details* +
+Lucene 10 ships with an upgrade of its Snowball stemmers. For details see https://github.com/apache/lucene/issues/13209. Users of Snowball stemmers who notice changes in search behaviour on existing data are advised to reindex.
+
+*Impact* +
+The upgrade should generally provide improved stemming results. Small changes in token analysis can lead to mismatches with previously indexed data, so existing indices that use Snowball stemmers as part of their analysis chain should be reindexed.
+====
+
+[[german2_snowball_stemmer_an_alias_for_german_stemmer]]
+.The "german2" snowball stemmer is now an alias for the "german" stemmer
+[%collapsible]
+====
+*Details* +
+Lucene 10 has merged the improved "german2" snowball language stemmer with the "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for "german". This may result in slightly different tokens being generated for terms with umlaut substitution (like "ue" for "ü", etc.).
+
+*Impact* +
+Replace usages of "german2" with "german" in analysis configuration. Old indices that use the "german" stemmer should be reindexed if possible.
+====
+
+[[persian_analyzer_has_stemmer_by_default]]
+.The 'persian' analyzer has a stemmer by default
+[%collapsible]
+====
+*Details* +
+Lucene 10 has added a final stemming step to its PersianAnalyzer, which Elasticsearch exposes as the 'persian' analyzer. Existing indices will keep the old non-stemming behaviour, while new indices will see the updated behaviour with added stemming. Users who wish to maintain the non-stemming behaviour need to define their own analyzer as outlined in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. Users who wish to use the new stemming behaviour for existing indices will have to reindex their data.
+
+*Impact* +
+Indexing with the 'persian' analyzer will produce slightly different tokens. Users should check whether this impacts their search results. If they wish to maintain the legacy non-stemming behaviour, they can define their own analyzer equivalent as explained in https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer.
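+
+For illustration, a minimal recreation of the legacy non-stemming behaviour could look like the sketch below, which mirrors the analyzer composition described in the 8.15 documentation linked above (the index and analyzer names are placeholders):
+
+[source,console]
+----
+PUT /persian_no_stem_example
+{
+  "settings": {
+    "analysis": {
+      "char_filter": {
+        "zero_width_spaces": {
+          "type": "mapping",
+          "mappings": [ "\\u200C=>\\u0020" ]
+        }
+      },
+      "filter": {
+        "persian_stop": {
+          "type": "stop",
+          "stopwords": "_persian_"
+        }
+      },
+      "analyzer": {
+        "rebuilt_persian_no_stem": {
+          "tokenizer": "standard",
+          "char_filter": [ "zero_width_spaces" ],
+          "filter": [
+            "lowercase",
+            "decimal_digit",
+            "arabic_normalization",
+            "persian_normalization",
+            "persian_stop"
+          ]
+        }
+      }
+    }
+  }
+}
+----
+
+The mapping char filter replaces zero-width non-joiners with a plain space before tokenization; the filter chain matches the pre-stemming analyzer but omits the new stemming step.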
+====
+
+[[korean_dictionary_for_nori_has_been_updated]]
+.The Korean dictionary for Nori has been updated
+[%collapsible]
+====
+*Details* +
+Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). For details see https://github.com/apache/lucene/issues/11452. Users experiencing changes in search behaviour on existing data are advised to reindex.
+
+*Impact* +
+The change is small and should generally provide better analysis results. Existing indices for full-text use cases should be reindexed, though.
+====
+
+[discrete]
+[[breaking_90_cluster_and_node_setting_changes]]
+==== Cluster and node setting changes
+
+[[minimum_shard_balancer_threshold_1_0]]
+.Minimum shard balancer threshold is now 1.0
+[%collapsible]
+====
+*Details* +
+Earlier versions of {es} accepted any non-negative value for `cluster.routing.allocation.balance.threshold`, but values smaller than `1.0` do not make sense and have been ignored since version 8.6.1. From 9.0.0, these nonsensical values are forbidden.
+
+*Impact* +
+Do not set `cluster.routing.allocation.balance.threshold` to a value less than `1.0`.
+====
+
+[[remove_cluster_routing_allocation_disk_watermark_enable_for_single_data_node_setting]]
+.Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting
+[%collapsible]
+====
+*Details* +
+Prior to 7.8, whenever a cluster had only a single data node, the watermarks would not be respected. In order to change this in 7.8+ in a backwards-compatible way, we introduced the `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` node setting. The setting was deprecated in 7.14 and was made to accept only `true` in 8.0.
+
+*Impact* +
+No known end-user impact.
+====
+
+[[remove_deprecated_xpack_searchable_snapshot_allocate_on_rolling_restart_setting]]
+.Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting
+[%collapsible]
+====
+*Details* +
+The `xpack.searchable.snapshot.allocate_on_rolling_restart` setting was created as an escape-hatch just in case relying on the `cluster.routing.allocation.enable=primaries` setting for allocating searchable snapshots during rolling restarts had some unintended side-effects. It has been deprecated since 8.2.0.
+
+*Impact* +
+Remove `xpack.searchable.snapshot.allocate_on_rolling_restart` from your settings if present.
+====
+
+[[remove_unsupported_legacy_value_for_discovery_type]]
+.Remove unsupported legacy value for `discovery.type`
+[%collapsible]
+====
+*Details* +
+Earlier versions of {es} had a `discovery.type` setting which permitted values that referred to legacy discovery types. From v9.0.0 onwards, the only supported values for this setting are `multi-node` (the default) and `single-node`.
+
+*Impact* +
+Remove any value for `discovery.type` from your `elasticsearch.yml` configuration file.
+====
+
+[discrete]
+[[breaking_90_ingest_changes]]
+==== Ingest changes
+
+[[remove_ecs_option_on_user_agent_processor]]
+.Remove `ecs` option on `user_agent` processor
+[%collapsible]
+====
+*Details* +
+The `user_agent` ingest processor no longer accepts the `ecs` option. (It was previously deprecated and ignored.)
+
+*Impact* +
+Users should stop using the `ecs` option when creating instances of the `user_agent` ingest processor. The option will be removed from existing processors stored in the cluster state on upgrade.
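+
+A `user_agent` processor definition that is valid after this change simply omits the option, for example (the pipeline and field names here are illustrative):
+
+[source,console]
+----
+PUT _ingest/pipeline/user_agent_example
+{
+  "description": "Parse the user agent string stored in the 'agent' field",
+  "processors": [
+    {
+      "user_agent": {
+        "field": "agent"
+      }
+    }
+  ]
+}
+----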
+====
+
+[[remove_ignored_fallback_option_on_geoip_processor]]
+.Remove ignored fallback option on GeoIP processor
+[%collapsible]
+====
+*Details* +
+The `fallback_to_default_databases` option on the `geoip` ingest processor has been removed. (It had been deprecated and ignored since 8.0.0.)
+
+*Impact* +
+Users should remove the no-op `fallback_to_default_databases` option from any `geoip` ingest processors.
+====
+
+[discrete]
+[[breaking_90_mapping_changes]]
+==== Mapping changes
+
+[[remove_support_for_type_fields_copy_to_boost_in_metadata_field_definition]]
+.Remove support for type, fields, copy_to and boost in metadata field definition
+[%collapsible]
+====
+*Details* +
+The type, fields, copy_to and boost parameters are no longer supported in metadata field definitions.
+
+*Impact* +
+Users providing type, fields, copy_to or boost as part of a metadata field definition should remove them from their mappings.
+====
+
+[discrete]
+[[breaking_90_rest_api_changes]]
+==== REST API changes
+
+[[apply_more_strict_parsing_of_actions_in_bulk_api]]
+.Apply more strict parsing of actions in bulk API
+[%collapsible]
+====
+*Details* +
+Previously, the following classes of malformed input were deprecated but not rejected in the action lines of a bulk request: missing closing brace; additional keys after the action (which were ignored); additional data after the closing brace (which was ignored). They will now be considered errors and rejected.
+
+*Impact* +
+Users must provide well-formed input when using the bulk API. (They can request REST API compatibility with v8 to get the previous behaviour back as an interim measure.)
+====
+
+[[error_json_structure_has_changed_when_detailed_errors_are_disabled]]
+.Error JSON structure has changed when detailed errors are disabled
+[%collapsible]
+====
+*Details* +
+This change modifies the JSON format of error messages returned to REST clients
+when detailed messages are turned off.
+Previously, JSON returned when an exception occurred, and `http.detailed_errors.enabled: false` was set,
+just consisted of a single `"error"` text field with some basic information.
+Setting `http.detailed_errors.enabled: true` (the default) changed this field
+to an object with more detailed information.
+With this change, non-detailed errors now have the same structure as detailed errors. `"error"` will now always
+be an object with, at a minimum, a `"type"` and `"reason"` field. Additional fields are included when detailed
+errors are enabled.
+To use the previous structure for non-detailed errors, use the v8 REST API.
+
+*Impact* +
+If you have set `http.detailed_errors.enabled: false` (the default is `true`),
+the structure of JSON when any exceptions occur now matches the structure when
+detailed errors are enabled.
+To use the previous structure for non-detailed errors, use the v8 REST API.
+====
+
+[[remove_cluster_state_from_cluster_reroute_response]]
+.Remove cluster state from `/_cluster/reroute` response
+[%collapsible]
+====
+*Details* +
+The `POST /_cluster/reroute` API no longer returns the cluster state in its response. The `?metric` query parameter to this API now has no effect and its use will be forbidden in a future version.
+
+*Impact* +
+Cease usage of the `?metric` query parameter when calling the `POST /_cluster/reroute` API.
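+
+For example, a reroute request that retries failed allocations would now be issued without the `?metric` parameter, and its response no longer carries the cluster state:
+
+[source,console]
+----
+POST /_cluster/reroute?retry_failed=true
+----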
+==== + +[[remove_deprecated_local_attribute_from_alias_apis]] +.Remove deprecated local attribute from alias APIs +[%collapsible] +==== +*Details* + +The following APIs no longer accept the `?local` query parameter: `GET /_alias`, `GET /_aliases`, `GET /_alias/{name}`, `HEAD /_alias/{name}`, `GET /{index}/_alias`, `HEAD /{index}/_alias`, `GET /{index}/_alias/{name}`, `HEAD /{index}/_alias/{name}`, `GET /_cat/aliases`, and `GET /_cat/aliases/{alias}`. This parameter has been deprecated and ignored since version 8.12. + +*Impact* + +Cease usage of the `?local` query parameter when calling the listed APIs. +==== + +[[remove_legacy_params_from_range_query]] +.Remove legacy params from range query +[%collapsible] +==== +*Details* + +The deprecated range query parameters `to`, `from`, `include_lower`, and `include_upper` are no longer supported. + +*Impact* + +Users should use `lt`, `lte`, `gt`, and `gte` query parameters instead. +==== + +[[remove_support_for_deprecated_force_source_highlighting_parameter]] +.Remove support for deprecated `force_source` highlighting parameter +[%collapsible] +==== +*Details* + +The deprecated highlighting `force_source` parameter is no longer supported. + +*Impact* + +Users should remove usages of the `force_source` parameter from their search requests. +==== [discrete] @@ -235,85 +257,45 @@ after upgrading to 9.0. To find out if you are using any deprecated functionality, enable <>. -// -// [discrete] -// [[deprecations_90_analysis]] -// ==== Analysis deprecations -// -// [[deprecate_dutch_kp_lovins_stemmer_as_they_are_removed_in_lucene_10]] -// .Deprecate dutch_kp and lovins stemmer as they are removed in Lucene 10 -// [%collapsible] -// ==== -// *Details* + -// kp, dutch_kp, dutchKp and lovins stemmers are deprecated and will be removed. -// -// *Impact* + -// These stemmers will be removed and will be no longer supported. -// ==== -// -// [[deprecate_edge_ngram_side_parameter]] -// .deprecate `edge_ngram` side parameter -// [%collapsible] -// ==== -// *Details* + -// edge_ngram will no longer accept the side parameter. -// -// *Impact* + -// Users will need to update any usage of edge_ngram token filter that utilizes `side`. If the `back` value was used, they can achieve the same behavior by using the `reverse` token filter. -// ==== -// -// [discrete] -// [[deprecations_90_crud]] -// ==== CRUD deprecations -// -// [[deprecate_dot_prefixed_indices_composable_template_index_patterns]] -// .Deprecate dot-prefixed indices and composable template index patterns -// [%collapsible] -// ==== -// *Details* + -// Indices beginning with a dot '.' are reserved for system and internal indices, and should not be used by and end-user. Additionally, composable index templates that contain patterns for dot-prefixed indices should also be avoided, as these patterns are meant for internal use only. In a future Elasticsearch version, creation of these dot-prefixed indices will no longer be allowed. -// -// *Impact* + -// Requests performing an action that would create an index beginning with a dot (indexing a document, manual creation, reindex), or creating an index template with index patterns beginning with a dot, will contain a deprecation header warning about dot-prefixed indices in the response. 
-// ====
-//
-// [discrete]
-// [[deprecations_90_rest_api]]
-// ==== REST API deprecations
-//
-// [[adding_deprecation_warnings_for_rrf_using_rank_sub_searches]]
-// .Adding deprecation warnings for rrf using rank and `sub_searches`
-// [%collapsible]
-// ====
-// *Details* +
-// Search API parameter `sub_searches` will no longer be a supported and will be removed in future releases. Similarly, `rrf` can only be used through the specified `retriever` and no longer though the `rank` parameter
-//
-// *Impact* +
-// Requests specifying rrf through `rank` and/or `sub_searches` elements will be disallowed in a future version. Users should instead utilize the new `retriever` parameter.
-// ====
-//
-// [[deprecate_legacy_params_from_range_query]]
-// .Deprecate legacy params from range query
-// [%collapsible]
-// ====
-// *Details* +
-// Range query will not longer accept `to`, `from`, `include_lower`, and `include_upper` parameters.
-//
-// *Impact* +
-// Instead use `gt`, `gte`, `lt` and `lte` parameters.
-// ====
-//
-// [[inference_api_deprecate_elser_service]]
-// .[Inference API] Deprecate elser service
-// [%collapsible]
-// ====
-// *Details* +
-// The `elser` service of the inference API will be removed in an upcoming release. Please use the elasticsearch service instead.
-//
-// *Impact* +
-// In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API.
-// ====
-
-// BELOW WAS MANUALLY ADDED TO FIX THE BUILD
-include::migrate_9_0/transient-settings-migration-guide.asciidoc[]
-//include::migrate_9_0/rest-api-changes.asciidoc[] //see ES-9932
+
+
+[discrete]
+[[deprecations_90_mapping]]
+==== Mapping deprecations
+
+[[deprecate_source_mode_in_mappings]]
+.Deprecate `_source.mode` in mappings
+[%collapsible]
+====
+*Details* +
+Configuring `_source.mode` in mappings is deprecated and will be removed in future versions. Use the `index.mapping.source.mode` index setting instead.
+
+*Impact* +
+Use the `index.mapping.source.mode` index setting instead.
+====
+
+[discrete]
+[[deprecations_90_rest_api]]
+==== REST API deprecations
+
+[[document_type_deprecated_on_simulate_pipeline_api]]
+.Document `_type` deprecated on simulate pipeline API
+[%collapsible]
+====
+*Details* +
+Passing a document with a `_type` property is deprecated in the `/_ingest/pipeline/{id}/_simulate` and `/_ingest/pipeline/_simulate` APIs.
+
+*Impact* +
+Users should already have stopped using mapping types, which were deprecated in {es} 7. This deprecation warning will fire if they specify mapping types on documents passed to the simulate pipeline API.
+====
+
+[[inference_api_deprecate_elser_service]]
+.[Inference API] Deprecate elser service
+[%collapsible]
+====
+*Details* +
+The `elser` service of the inference API will be removed in an upcoming release. Please use the `elasticsearch` service instead.
+
+*Impact* +
+In the current version there is no impact. In a future version, users of the `elser` service will no longer be able to use it, and will be required to use the `elasticsearch` service to access elser through the inference API.
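+
+As a sketch of the migration path, an equivalent ELSER endpoint can be created through the `elasticsearch` service along these lines (the endpoint name is illustrative; consult the inference API documentation for the full set of service settings):
+
+[source,console]
+----
+PUT _inference/sparse_embedding/my-elser-endpoint
+{
+  "service": "elasticsearch",
+  "service_settings": {
+    "model_id": ".elser_model_2",
+    "num_allocations": 1,
+    "num_threads": 1
+  }
+}
+----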
+ + diff --git a/docs/reference/release-notes/9.0.0.asciidoc b/docs/reference/release-notes/9.0.0.asciidoc index af26fd57385e3..93e5a30cb82f7 100644 --- a/docs/reference/release-notes/9.0.0.asciidoc +++ b/docs/reference/release-notes/9.0.0.asciidoc @@ -1,6 +1,3 @@ -// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY. -// The content generated here are is not correct and most has been manually commented out until it can be fixed. -// See ES-9931 for more details. [[release-notes-9.0.0]] == {es} version 9.0.0 @@ -12,546 +9,289 @@ Also see <>. [float] === Breaking changes -// Allocation:: -// * Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978]) -// -// Analysis:: -// * Set lenient to true by default when using updateable synonyms {es-pull}110901[#110901] -// * Snowball stemmers have been upgraded {es-pull}114146[#114146] -// * The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614] -// * The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050]) -// * The Korean dictionary for Nori has been updated {es-pull}114124[#114124] -// -// Cluster Coordination:: -// * Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903] -// -// Data streams:: -// * Update data stream lifecycle telemetry to track global retention {es-pull}112451[#112451] -// -// ES|QL:: -// * ESQL: Entirely remove META FUNCTIONS {es-pull}113967[#113967] -// -// Indices APIs:: -// * Remove deprecated local attribute from alias APIs {es-pull}115393[#115393] -// -// Mapping:: -// * JDK locale database change {es-pull}113975[#113975] -// -// Search:: -// * Adding breaking change entry for retrievers {es-pull}115399[#115399] +Allocation:: +* Increase minimum threshold in shard balancer {es-pull}115831[#115831] +* Remove `cluster.routing.allocation.disk.watermark.enable_for_single_data_node` setting {es-pull}114207[#114207] +* Remove cluster state from `/_cluster/reroute` response {es-pull}114231[#114231] (issue: {es-issue}88978[#88978]) + +Analysis:: +* Snowball stemmers have been upgraded {es-pull}114146[#114146] +* The 'german2' stemmer is now an alias for the 'german' snowball stemmer {es-pull}113614[#113614] +* The 'persian' analyzer has stemmer by default {es-pull}113482[#113482] (issue: {es-issue}113050[#113050]) +* The Korean dictionary for Nori has been updated {es-pull}114124[#114124] + +Cluster Coordination:: +* Remove unsupported legacy value for `discovery.type` {es-pull}112903[#112903] + +Highlighting:: +* Remove support for deprecated `force_source` highlighting parameter {es-pull}116943[#116943] + +Indices APIs:: +* Apply more strict parsing of actions in bulk API {es-pull}115923[#115923] +* Remove deprecated local attribute from alias APIs {es-pull}115393[#115393] + +Infra/REST API:: +* Output a consistent format when generating error json {es-pull}90529[#90529] (issue: {es-issue}89387[#89387]) + +Ingest Node:: +* Remove `ecs` option on `user_agent` processor {es-pull}116077[#116077] +* Remove ignored fallback option on GeoIP processor {es-pull}116112[#116112] + +Mapping:: +* Remove support for type, fields, `copy_to` and boost in metadata field definition {es-pull}116944[#116944] + +Search:: +* Remove legacy params from range query {es-pull}116970[#116970] + +Snapshot/Restore:: +* Remove deprecated `xpack.searchable.snapshot.allocate_on_rolling_restart` setting {es-pull}114202[#114202] [[bug-9.0.0]] [float] === Bug fixes -// -// Aggregations:: -// * 
Always check the parent breaker with zero bytes in `PreallocatedCircuitBreakerService` {es-pull}115181[#115181] -// * Force using the last centroid during merging {es-pull}111644[#111644] (issue: {es-issue}111065[#111065]) -// -// Authentication:: -// * Check for disabling own user in Put User API {es-pull}112262[#112262] (issue: {es-issue}90205[#90205]) -// * Expose cluster-state role mappings in APIs {es-pull}114951[#114951] -// -// Authorization:: -// * Fix DLS & FLS sometimes being enforced when it is disabled {es-pull}111915[#111915] (issue: {es-issue}94709[#94709]) -// * Fix DLS using runtime fields and synthetic source {es-pull}112341[#112341] -// -// CRUD:: -// * Don't fail retention lease sync actions due to capacity constraints {es-pull}109414[#109414] (issue: {es-issue}105926[#105926]) -// * Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] -// * Standardize error code when bulk body is invalid {es-pull}114869[#114869] -// -// Cluster Coordination:: -// * Ensure clean thread context in `MasterService` {es-pull}114512[#114512] -// -// Data streams:: -// * Adding support for data streams with a match-all template {es-pull}111311[#111311] (issue: {es-issue}111204[#111204]) -// * Exclude internal data streams from global retention {es-pull}112100[#112100] -// * Fix verbose get data stream API not requiring extra privileges {es-pull}112973[#112973] -// * OTel mappings: avoid metrics to be rejected when attributes are malformed {es-pull}114856[#114856] -// * [otel-data] Add more kubernetes aliases {es-pull}115429[#115429] -// * logs-apm.error-*: define log.level field as keyword {es-pull}112440[#112440] -// -// Distributed:: -// * Handle `InternalSendException` inline for non-forking handlers {es-pull}114375[#114375] -// -// EQL:: -// * Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) -// * Fix validation of TEXT fields with case insensitive comparison {es-pull}111238[#111238] (issue: {es-issue}111235[#111235]) -// -// ES|QL:: -// * ESQL: Add Values aggregation tests, fix `ConstantBytesRefBlock` memory handling {es-pull}111367[#111367] -// * ESQL: Align year diffing to the rest of the units in DATE_DIFF: chronological {es-pull}113103[#113103] (issue: {es-issue}112482[#112482]) -// * ESQL: Disable pushdown of WHERE past STATS {es-pull}115308[#115308] (issue: {es-issue}115281[#115281]) -// * ESQL: Fix CASE when conditions are multivalued {es-pull}112401[#112401] (issue: {es-issue}112359[#112359]) -// * ESQL: Fix Double operations returning infinite {es-pull}111064[#111064] (issue: {es-issue}111026[#111026]) -// * ESQL: Fix `REVERSE` with backspace character {es-pull}115245[#115245] (issues: {es-issue}114372[#114372], {es-issue}115227[#115227], {es-issue}115228[#115228]) -// * ESQL: Fix a bug in `MV_PERCENTILE` {es-pull}112218[#112218] (issues: {es-issue}112193[#112193], {es-issue}112180[#112180], {es-issue}112187[#112187], {es-issue}112188[#112188]) -// * ESQL: Fix filtered grouping on ords {es-pull}115312[#115312] (issue: {es-issue}114897[#114897]) -// * ESQL: Fix grammar changes around per agg filtering {es-pull}114848[#114848] -// * ESQL: Fix serialization during `can_match` {es-pull}111779[#111779] (issues: {es-issue}111701[#111701], {es-issue}111726[#111726]) -// * ESQL: Fix synthetic attribute pruning {es-pull}111413[#111413] (issue: {es-issue}105821[#105821]) -// * ESQL: don't lose the original casting error message {es-pull}111968[#111968] (issue: 
{es-issue}111967[#111967]) -// * ESQL: fix for missing indices error message {es-pull}111797[#111797] (issue: {es-issue}111712[#111712]) -// * ES|QL: Fix stats by constant expression {es-pull}114899[#114899] -// * ES|QL: Restrict sorting for `_source` and counter field types {es-pull}114638[#114638] (issues: {es-issue}114423[#114423], {es-issue}111976[#111976]) -// * ES|QL: better validation for GROK patterns {es-pull}110574[#110574] (issue: {es-issue}110533[#110533]) -// * ES|QL: better validation for RLIKE patterns {es-pull}112489[#112489] (issue: {es-issue}112485[#112485]) -// * ES|QL: better validation of GROK patterns {es-pull}112200[#112200] (issue: {es-issue}112111[#112111]) -// * Fix ST_CENTROID_AGG when no records are aggregated {es-pull}114888[#114888] (issue: {es-issue}106025[#106025]) -// * Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194]) -// * Spatial search functions support multi-valued fields in compute engine {es-pull}112063[#112063] (issues: {es-issue}112102[#112102], {es-issue}112505[#112505], {es-issue}110830[#110830]) -// * [ES|QL] Check expression resolved before checking its data type in `ImplicitCasting` {es-pull}113314[#113314] (issue: {es-issue}113242[#113242]) -// * [ES|QL] Simplify patterns for subfields {es-pull}111118[#111118] -// * [ES|QL] Simplify syntax of named parameter for identifier and pattern {es-pull}115061[#115061] -// * [ES|QL] Skip validating remote cluster index names in parser {es-pull}114271[#114271] -// * [ES|QL] Use `RangeQuery` and String in `BinaryComparison` on datetime fields {es-pull}110669[#110669] (issue: {es-issue}107900[#107900]) -// * [ES|QL] add tests for stats by constant {es-pull}110593[#110593] (issue: {es-issue}105383[#105383]) -// * [ES|QL] make named parameter for identifier and pattern snapshot {es-pull}114784[#114784] -// * [ES|QL] validate `mv_sort` order {es-pull}110021[#110021] (issue: {es-issue}109910[#109910]) -// -// Geo:: -// * Fix cases of collections with one point {es-pull}111193[#111193] (issue: {es-issue}110982[#110982]) -// -// Health:: -// * Set `replica_unassigned_buffer_time` in constructor {es-pull}112612[#112612] -// -// ILM+SLM:: -// * Make `SnapshotLifecycleStats` immutable so `SnapshotLifecycleMetadata.EMPTY` isn't changed as side-effect {es-pull}111215[#111215] -// -// Indices APIs:: -// * Revert "Add `ResolvedExpression` wrapper" {es-pull}115317[#115317] -// -// Infra/Core:: -// * Fix max file size check to use `getMaxFileSize` {es-pull}113723[#113723] (issue: {es-issue}113705[#113705]) -// * Guard blob store local directory creation with `doPrivileged` {es-pull}115459[#115459] -// * Handle `BigInteger` in xcontent copy {es-pull}111937[#111937] (issue: {es-issue}111812[#111812]) -// * Report JVM stats for all memory pools (97046) {es-pull}115117[#115117] (issue: {es-issue}97046[#97046]) -// * `ByteArrayStreamInput:` Return -1 when there are no more bytes to read {es-pull}112214[#112214] -// -// Infra/Logging:: -// * Only emit product origin in deprecation log if present {es-pull}111683[#111683] (issue: {es-issue}81757[#81757]) -// -// Infra/Metrics:: -// * Make `randomInstantBetween` always return value in range [minInstant, `maxInstant]` {es-pull}114177[#114177] -// -// Infra/REST API:: -// * Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. 
{es-pull}113413[#113413] (issue: {es-issue}113413[#113413]) -// -// Infra/Settings:: -// * GET _cluster/settings with include_defaults returns the expected fallback value if defined in elasticsearch.yml {es-pull}110816[#110816] (issue: {es-issue}110815[#110815]) -// -// Ingest Node:: -// * Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411]) -// * Fix IPinfo geolocation schema {es-pull}115147[#115147] -// * Fix `getDatabaseType` for unusual MMDBs {es-pull}112888[#112888] -// * Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924] -// -// License:: -// * Fix Start Trial API output acknowledgement header for features {es-pull}111740[#111740] (issue: {es-issue}111739[#111739]) -// * Fix `TokenService` always appearing used in Feature Usage {es-pull}112263[#112263] (issue: {es-issue}61956[#61956]) -// -// Logs:: -// * Do not expand dots when storing objects in ignored source {es-pull}113910[#113910] -// * Fix `ignore_above` handling in synthetic source when index level setting is used {es-pull}113570[#113570] (issue: {es-issue}113538[#113538]) -// * Fix synthetic source for flattened field when used with `ignore_above` {es-pull}113499[#113499] (issue: {es-issue}112044[#112044]) -// -// Machine Learning:: -// * Avoid `ModelAssignment` deadlock {es-pull}109684[#109684] -// * Fix NPE in Get Deployment Stats {es-pull}115404[#115404] -// * Fix bug in ML serverless autoscaling which prevented trained model updates from triggering a scale up {es-pull}110734[#110734] -// * Ignore unrecognized openai sse fields {es-pull}114715[#114715] -// * Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232]) -// * Prevent NPE if model assignment is removed while waiting to start {es-pull}115430[#115430] -// * Send mid-stream errors to users {es-pull}114549[#114549] -// * Temporarily return both `modelId` and `inferenceId` for GET /_inference until we migrate clients to only `inferenceId` {es-pull}111490[#111490] -// * Warn for model load failures if they have a status code <500 {es-pull}113280[#113280] -// * [Inference API] Remove unused Cohere rerank service settings fields in a BWC way {es-pull}110427[#110427] -// * [ML] Create Inference API will no longer return model_id and now only return inference_id {es-pull}112508[#112508] -// -// Mapping:: -// * Fix `MapperBuilderContext#isDataStream` when used in dynamic mappers {es-pull}110554[#110554] -// * Fix synthetic source field names for multi-fields {es-pull}112850[#112850] -// * Retrieve the source for objects and arrays in a separate parsing phase {es-pull}113027[#113027] (issue: {es-issue}112374[#112374]) -// * Two empty mappings now are created equally {es-pull}107936[#107936] (issue: {es-issue}107031[#107031]) -// -// Ranking:: -// * Fix MLTQuery handling of custom term frequencies {es-pull}110846[#110846] -// * Fix RRF validation for `rank_constant` < 1 {es-pull}112058[#112058] -// * Fix score count validation in reranker response {es-pull}111212[#111212] (issue: {es-issue}111202[#111202]) -// -// Search:: -// * Allow for querries on `_tier` to skip shards in the `can_match` phase {es-pull}114990[#114990] (issue: {es-issue}114910[#114910]) -// * Allow out of range term queries for numeric types {es-pull}112916[#112916] -// * Do not exclude empty arrays or empty objects in source filtering {es-pull}112250[#112250] 
(issue: {es-issue}109668[#109668]) -// * Fix synthetic source handling for `bit` type in `dense_vector` field {es-pull}114407[#114407] (issue: {es-issue}114402[#114402]) -// * Improve DateTime error handling and add some bad date tests {es-pull}112723[#112723] (issue: {es-issue}112190[#112190]) -// * Improve date expression/remote handling in index names {es-pull}112405[#112405] (issue: {es-issue}112243[#112243]) -// * Make "too many clauses" throw IllegalArgumentException to avoid 500s {es-pull}112678[#112678] (issue: {es-issue}112177[#112177]) -// * Make empty string searches be consistent with case (in)sensitivity {es-pull}110833[#110833] -// * Prevent flattening of ordered and unordered interval sources {es-pull}114234[#114234] -// * Remove needless forking to GENERIC in `TransportMultiSearchAction` {es-pull}110796[#110796] -// * Search/Mapping: KnnVectorQueryBuilder support for allowUnmappedFields {es-pull}107047[#107047] (issue: {es-issue}106846[#106846]) -// * Span term query to convert to match no docs when unmapped field is targeted {es-pull}113251[#113251] -// * Speedup `CanMatchPreFilterSearchPhase` constructor {es-pull}110860[#110860] -// * Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) -// -// Security:: -// * Updated the transport CA name in Security Auto-Configuration. {es-pull}106520[#106520] (issue: {es-issue}106455[#106455]) -// -// Snapshot/Restore:: -// * Retry throttled snapshot deletions {es-pull}113237[#113237] -// -// TSDB:: -// * Implement `parseBytesRef` for `TimeSeriesRoutingHashFieldType` {es-pull}113373[#113373] (issue: {es-issue}112399[#112399]) -// -// Task Management:: -// * Improve handling of failure to create persistent task {es-pull}114386[#114386] -// -// Transform:: -// * Allow task canceling of validate API calls {es-pull}110951[#110951] -// * Include reason when no nodes are found {es-pull}112409[#112409] (issue: {es-issue}112404[#112404]) -// -// Vector Search:: -// * Fix dim validation for bit `element_type` {es-pull}114533[#114533] -// * Support semantic_text in object fields {es-pull}114601[#114601] (issue: {es-issue}114401[#114401]) -// -// Watcher:: -// * Truncating watcher history if it is too large {es-pull}111245[#111245] (issue: {es-issue}94745[#94745]) -// * Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) -// -// [[deprecation-9.0.0]] -// [float] -// === Deprecations -// -// Analysis:: -// * Deprecate dutch_kp and lovins stemmer as they are removed in Lucene 10 {es-pull}113143[#113143] -// * deprecate `edge_ngram` side parameter {es-pull}110829[#110829] -// -// CRUD:: -// * Deprecate dot-prefixed indices and composable template index patterns {es-pull}112571[#112571] -// -// Machine Learning:: -// * [Inference API] Deprecate elser service {es-pull}113216[#113216] -// -// Search:: -// * Adding deprecation warnings for rrf using rank and `sub_searches` {es-pull}114854[#114854] -// * Deprecate legacy params from range query {es-pull}113286[#113286] -// -// [[enhancement-9.0.0]] -// [float] -// === Enhancements -// -// Aggregations:: -// * Account for `DelayedBucket` before reduction {es-pull}113013[#113013] -// * Add protection for OOM during aggregations partial reduction {es-pull}110520[#110520] -// * Deduplicate `BucketOrder` when deserializing {es-pull}112707[#112707] -// * Lower the memory footprint when creating `DelayedBucket` {es-pull}112519[#112519] -// * Reduce heap usage for 
`AggregatorsReducer` {es-pull}112874[#112874] -// * Remove reduce and `reduceContext` from `DelayedBucket` {es-pull}112547[#112547] -// -// Allocation:: -// * Add link to flood-stage watermark exception message {es-pull}111315[#111315] -// * Always allow rebalancing by default {es-pull}111015[#111015] -// * Only publish desired balance gauges on master {es-pull}115383[#115383] -// -// Application:: -// * [Profiling] add `container.id` field to event index template {es-pull}111969[#111969] -// -// Authorization:: -// * Add manage roles privilege {es-pull}110633[#110633] -// * Add privileges required for CDR misconfiguration features to work on AWS SecurityHub integration {es-pull}112574[#112574] -// * [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] -// -// CRUD:: -// * Suppress merge-on-recovery for older indices {es-pull}113462[#113462] -// -// Codec:: -// * Remove zstd feature flag for index codec best compression {es-pull}112665[#112665] -// -// Data streams:: -// * Add 'verbose' flag retrieving `maximum_timestamp` for get data stream API {es-pull}112303[#112303] -// * Display effective retention in the relevant data stream APIs {es-pull}112019[#112019] -// * Expose global retention settings via data stream lifecycle API {es-pull}112210[#112210] -// * Make ecs@mappings work with OTel attributes {es-pull}111600[#111600] -// -// Distributed:: -// * Add link to Max Shards Per Node exception message {es-pull}110993[#110993] -// * Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] -// -// EQL:: -// * ESQL: Delay construction of warnings {es-pull}114368[#114368] -// -// ES|QL:: -// * Add EXP ES|QL function {es-pull}110879[#110879] -// * Add `CircuitBreaker` to TDigest, Step 3: Connect with ESQL CB {es-pull}113387[#113387] -// * Add `CircuitBreaker` to TDigest, Step 4: Take into account shallow classes size {es-pull}113613[#113613] (issue: {es-issue}113916[#113916]) -// * Collect and display execution metadata for ES|QL cross cluster searches {es-pull}112595[#112595] (issue: {es-issue}112402[#112402]) -// * ESQL: Add support for multivalue fields in Arrow output {es-pull}114774[#114774] -// * ESQL: BUCKET: allow numerical spans as whole numbers {es-pull}111874[#111874] (issues: {es-issue}104646[#104646], {es-issue}109340[#109340], {es-issue}105375[#105375]) -// * ESQL: Have BUCKET generate friendlier intervals {es-pull}111879[#111879] (issue: {es-issue}110916[#110916]) -// * ESQL: Profile more timing information {es-pull}111855[#111855] -// * ESQL: Push down filters even in case of renames in Evals {es-pull}114411[#114411] -// * ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] -// * ESQL: Speed up CASE for some parameters {es-pull}112295[#112295] -// * ESQL: Speed up grouping by bytes {es-pull}114021[#114021] -// * ESQL: Support INLINESTATS grouped on expressions {es-pull}111690[#111690] -// * ESQL: Use less memory in listener {es-pull}114358[#114358] -// * ES|QL: Add support for cached strings in plan serialization {es-pull}112929[#112929] -// * ES|QL: add Telemetry API and track top functions {es-pull}111226[#111226] -// * ES|QL: add metrics for functions {es-pull}114620[#114620] -// * Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function {es-pull}112938[#112938] (issue: {es-issue}109973[#109973]) -// * Siem ea 9521 improve test {es-pull}111552[#111552] -// * Support multi-valued fields in compute engine for ST_DISTANCE 
{es-pull}114836[#114836] (issue: {es-issue}112910[#112910]) -// * [ESQL] Add `SPACE` function {es-pull}112350[#112350] -// * [ESQL] Add finish() elapsed time to aggregation profiling times {es-pull}113172[#113172] (issue: {es-issue}112950[#112950]) -// * [ESQL] Make query wrapped by `SingleValueQuery` cacheable {es-pull}110116[#110116] -// * [ES|QL] Add hypot function {es-pull}114382[#114382] -// * [ES|QL] Cast mixed numeric types to a common numeric type for Coalesce and In at Analyzer {es-pull}111917[#111917] (issue: {es-issue}111486[#111486]) -// * [ES|QL] Combine Disjunctive CIDRMatch {es-pull}111501[#111501] (issue: {es-issue}105143[#105143]) -// * [ES|QL] Create `Range` in `PushFiltersToSource` for qualified pushable filters on the same field {es-pull}111437[#111437] -// * [ES|QL] Name parameter with leading underscore {es-pull}111950[#111950] (issue: {es-issue}111821[#111821]) -// * [ES|QL] Named parameter for field names and field name patterns {es-pull}112905[#112905] -// * [ES|QL] Validate index name in parser {es-pull}112081[#112081] -// * [ES|QL] add reverse function {es-pull}113297[#113297] -// * [ES|QL] explicit cast a string literal to `date_period` and `time_duration` in arithmetic operations {es-pull}109193[#109193] -// -// Experiences:: -// * Integrate IBM watsonx to Inference API for text embeddings {es-pull}111770[#111770] -// -// Geo:: -// * Add support for spatial relationships in point field mapper {es-pull}112126[#112126] -// * Small performance improvement in h3 library {es-pull}113385[#113385] -// * Support docvalues only query in shape field {es-pull}112199[#112199] -// -// Health:: -// * (API) Cluster Health report `unassigned_primary_shards` {es-pull}112024[#112024] -// * Do not treat replica as unassigned if primary recently created and unassigned time is below a threshold {es-pull}112066[#112066] -// * Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] -// -// ILM+SLM:: -// * ILM: Add `total_shards_per_node` setting to searchable snapshot {es-pull}112972[#112972] (issue: {es-issue}112261[#112261]) -// * PUT slm policy should only increase version if actually changed {es-pull}111079[#111079] -// * Preserve Step Info Across ILM Auto Retries {es-pull}113187[#113187] -// * Register SLM run before snapshotting to save stats {es-pull}110216[#110216] -// * SLM interval schedule followup - add back `getFieldName` style getters {es-pull}112123[#112123] -// -// Infra/Circuit Breakers:: -// * Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] -// -// Infra/Core:: -// * Add nanos support to `ZonedDateTime` serialization {es-pull}111689[#111689] (issue: {es-issue}68292[#68292]) -// * Extend logging for dropped warning headers {es-pull}111624[#111624] (issue: {es-issue}90527[#90527]) -// * Give the kibana system user permission to read security entities {es-pull}114363[#114363] -// -// Infra/Metrics:: -// * Add `TaskManager` to `pluginServices` {es-pull}112687[#112687] -// * Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425] -// -// Infra/REST API:: -// * Optimize the loop processing of URL decoding {es-pull}110237[#110237] (issue: {es-issue}110235[#110235]) -// -// Infra/Scripting:: -// * Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002] -// * Expose `HexFormat` in Painless {es-pull}112412[#112412] -// -// Infra/Settings:: -// * Improve exception message for bad environment variable 
placeholders in settings {es-pull}114552[#114552] (issue: {es-issue}110858[#110858]) -// * Reprocess operator file settings when settings service starts, due to node restart or master node change {es-pull}114295[#114295] -// -// Ingest Node:: -// * Add `size_in_bytes` to enrich cache stats {es-pull}110578[#110578] -// * Add support for templates when validating mappings in the simulate ingest API {es-pull}111161[#111161] -// * Adding `index_template_substitutions` to the simulate ingest API {es-pull}114128[#114128] -// * Adding component template substitutions to the simulate ingest API {es-pull}113276[#113276] -// * Adding mapping validation to the simulate ingest API {es-pull}110606[#110606] -// * Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742] -// * Adding support for simulate ingest mapping adddition for indices with mappings that do not come from templates {es-pull}115359[#115359] -// * Adds example plugin for custom ingest processor {es-pull}112282[#112282] (issue: {es-issue}111539[#111539]) -// * Fix unnecessary mustache template evaluation {es-pull}110986[#110986] (issue: {es-issue}110191[#110191]) -// * Listing all available databases in the _ingest/geoip/database API {es-pull}113498[#113498] -// * Make enrich cache based on memory usage {es-pull}111412[#111412] (issue: {es-issue}106081[#106081]) -// * Tag redacted document in ingest metadata {es-pull}113552[#113552] -// * Verify Maxmind database types in the geoip processor {es-pull}114527[#114527] -// -// Logs:: -// * Add validation for synthetic source mode in logs mode indices {es-pull}110677[#110677] -// * Store original source for keywords using a normalizer {es-pull}112151[#112151] -// -// Machine Learning:: -// * Add Completion Inference API for Alibaba Cloud AI Search Model {es-pull}112512[#112512] -// * Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852] -// * Add Streaming Inference spec {es-pull}113812[#113812] -// * Add chunking settings configuration to `CohereService,` `AmazonBedrockService,` and `AzureOpenAiService` {es-pull}113897[#113897] -// * Add chunking settings configuration to `ElasticsearchService/ELSER` {es-pull}114429[#114429] -// * Add custom rule parameters to force time shift {es-pull}110974[#110974] -// * Adding chunking settings to `GoogleVertexAiService,` `AzureAiStudioService,` and `AlibabaCloudSearchService` {es-pull}113981[#113981] -// * Adding chunking settings to `MistralService,` `GoogleAiStudioService,` and `HuggingFaceService` {es-pull}113623[#113623] -// * Adds a new Inference API for streaming responses back to the user. 
{es-pull}113158[#113158] -// * Create `StreamingHttpResultPublisher` {es-pull}112026[#112026] -// * Create an ml node inference endpoint referencing an existing model {es-pull}114750[#114750] -// * Default inference endpoint for ELSER {es-pull}113873[#113873] -// * Default inference endpoint for the multilingual-e5-small model {es-pull}114683[#114683] -// * Enable OpenAI Streaming {es-pull}113911[#113911] -// * Filter empty task settings objects from the API response {es-pull}114389[#114389] -// * Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041] -// * Migrate Inference to `ChunkedToXContent` {es-pull}111655[#111655] -// * Register Task while Streaming {es-pull}112369[#112369] -// * Server-Sent Events for Inference response {es-pull}112565[#112565] -// * Stream Anthropic Completion {es-pull}114321[#114321] -// * Stream Azure Completion {es-pull}114464[#114464] -// * Stream Bedrock Completion {es-pull}114732[#114732] -// * Stream Cohere Completion {es-pull}114080[#114080] -// * Stream Google Completion {es-pull}114596[#114596] -// * Stream OpenAI Completion {es-pull}112677[#112677] -// * Support sparse embedding models in the elasticsearch inference service {es-pull}112270[#112270] -// * Switch default chunking strategy to sentence {es-pull}114453[#114453] -// * Upgrade to AWS SDK v2 {es-pull}114309[#114309] (issue: {es-issue}110590[#110590]) -// * Use the same chunking configurations for models in the Elasticsearch service {es-pull}111336[#111336] -// * Validate streaming HTTP Response {es-pull}112481[#112481] -// * Wait for allocation on scale up {es-pull}114719[#114719] -// * [Inference API] Add Alibaba Cloud AI Search Model support to Inference API {es-pull}111181[#111181] -// * [Inference API] Add Docs for AlibabaCloud AI Search Support for the Inference API {es-pull}111181[#111181] -// * [Inference API] Introduce Update API to change some aspects of existing inference endpoints {es-pull}114457[#114457] -// * [Inference API] Prevent inference endpoints from being deleted if they are referenced by semantic text {es-pull}110399[#110399] -// * [Inference API] alibabacloud ai search service support chunk infer to support semantic_text field {es-pull}110399[#110399] -// -// Mapping:: -// * Add Field caps support for Semantic Text {es-pull}111809[#111809] -// * Add Lucene segment-level fields stats {es-pull}111123[#111123] -// * Add Search Inference ID To Semantic Text Mapping {es-pull}113051[#113051] -// * Add object param for keeping synthetic source {es-pull}113690[#113690] -// * Add support for multi-value dimensions {es-pull}112645[#112645] (issue: {es-issue}110387[#110387]) -// * Allow dimension fields to have multiple values in standard and logsdb index mode {es-pull}112345[#112345] (issues: {es-issue}112232[#112232], {es-issue}112239[#112239]) -// * Allow fields with dots in sparse vector field mapper {es-pull}111981[#111981] (issue: {es-issue}109118[#109118]) -// * Allow querying `index_mode` {es-pull}110676[#110676] -// * Configure keeping source in `FieldMapper` {es-pull}112706[#112706] -// * Control storing array source with index setting {es-pull}112397[#112397] -// * Introduce mode `subobjects=auto` for objects {es-pull}110524[#110524] -// * Update `semantic_text` field to support indexing numeric and boolean data types {es-pull}111284[#111284] -// * Use ELSER By Default For Semantic Text {es-pull}113563[#113563] -// * Use fallback synthetic source for `copy_to` and doc_values: false cases {es-pull}112294[#112294] 
(issues: {es-issue}110753[#110753], {es-issue}110038[#110038], {es-issue}109546[#109546]) -// -// Network:: -// * Add links to network disconnect troubleshooting {es-pull}112330[#112330] -// -// Ranking:: -// * Add timeout and cancellation check to rescore phase {es-pull}115048[#115048] -// -// Recovery:: -// * Trigger merges after recovery {es-pull}113102[#113102] -// -// Relevance:: -// * Add a query rules tester API call {es-pull}114168[#114168] -// -// Search:: -// * Add initial support for `semantic_text` field type {es-pull}113920[#113920] -// * Add more `dense_vector` details for cluster stats field stats {es-pull}113607[#113607] -// * Add range and regexp Intervals {es-pull}111465[#111465] -// * Adding support for `allow_partial_search_results` in PIT {es-pull}111516[#111516] -// * Allow incubating Panama Vector in simdvec, and add vectorized `ipByteBin` {es-pull}112933[#112933] -// * Avoid using concurrent collector manager in `LuceneChangesSnapshot` {es-pull}113816[#113816] -// * Bool query early termination should also consider `must_not` clauses {es-pull}115031[#115031] -// * Deduplicate Kuromoji User Dictionary {es-pull}112768[#112768] -// * Multi term intervals: increase max_expansions {es-pull}112826[#112826] (issue: {es-issue}110491[#110491]) -// * Search coordinator uses `event.ingested` in cluster state to do rewrites {es-pull}111523[#111523] -// * Update cluster stats for retrievers {es-pull}114109[#114109] -// -// Security:: -// * (logger) change from error to warn for short circuiting user {es-pull}112895[#112895] -// * Add asset criticality indices for `kibana_system_user` {es-pull}113588[#113588] -// * Add tier preference to security index settings allowlist {es-pull}111818[#111818] -// * [Service Account] Add `AutoOps` account {es-pull}111316[#111316] -// -// Snapshot/Restore:: -// * Add `max_multipart_parts` setting to S3 repository {es-pull}113989[#113989] -// * Add support for Azure Managed Identity {es-pull}111344[#111344] -// * Add telemetry for repository usage {es-pull}112133[#112133] -// * Add workaround for missing shard gen blob {es-pull}112337[#112337] -// * Clean up dangling S3 multipart uploads {es-pull}111955[#111955] (issues: {es-issue}101169[#101169], {es-issue}44971[#44971]) -// * Execute shard snapshot tasks in shard-id order {es-pull}111576[#111576] (issue: {es-issue}108739[#108739]) -// * Include account name in Azure settings exceptions {es-pull}111274[#111274] -// * Introduce repository integrity verification API {es-pull}112348[#112348] (issue: {es-issue}52622[#52622]) -// * Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813] -// * Track shard snapshot progress during node shutdown {es-pull}112567[#112567] -// -// Stats:: -// * Track search and fetch failure stats {es-pull}113988[#113988] -// -// TSDB:: -// * Add support for boolean dimensions {es-pull}111457[#111457] (issue: {es-issue}111338[#111338]) -// * Stop iterating over all fields to extract @timestamp value {es-pull}110603[#110603] (issue: {es-issue}92297[#92297]) -// * Support booleans in routing path {es-pull}111445[#111445] -// -// Vector Search:: -// * Dense vector field types updatable for int4 {es-pull}110928[#110928] -// * Use native scalar scorer for int8_flat index {es-pull}111071[#111071] -// -// [[feature-9.0.0]] -// [float] -// === New features -// -// Data streams:: -// * Introduce global retention in data stream lifecycle. 
{es-pull}111972[#111972] -// * X-pack/plugin/otel: introduce x-pack-otel plugin {es-pull}111091[#111091] -// -// ES|QL:: -// * Add ESQL match function {es-pull}113374[#113374] -// * ESQL: Add `MV_PSERIES_WEIGHTED_SUM` for score calculations used by security solution {es-pull}109017[#109017] -// * ESQL: Add async ID and `is_running` headers to ESQL async query {es-pull}111840[#111840] -// * ESQL: Add boolean support to Max and Min aggs {es-pull}110527[#110527] -// * ESQL: Add boolean support to TOP aggregation {es-pull}110718[#110718] -// * ESQL: Added `mv_percentile` function {es-pull}111749[#111749] (issue: {es-issue}111591[#111591]) -// * ESQL: INLINESTATS {es-pull}109583[#109583] (issue: {es-issue}107589[#107589]) -// * ESQL: Introduce per agg filter {es-pull}113735[#113735] -// * ESQL: Strings support for MAX and MIN aggregations {es-pull}111544[#111544] -// * ESQL: Support IP fields in MAX and MIN aggregations {es-pull}110921[#110921] -// * ESQL: TOP aggregation IP support {es-pull}111105[#111105] -// * ESQL: TOP support for strings {es-pull}113183[#113183] (issue: {es-issue}109849[#109849]) -// * ESQL: `mv_median_absolute_deviation` function {es-pull}112055[#112055] (issue: {es-issue}111590[#111590]) -// * Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482] -// * Search in ES|QL: Add MATCH operator {es-pull}110971[#110971] -// -// ILM+SLM:: -// * SLM Interval based scheduling {es-pull}110847[#110847] -// -// Inference:: -// * EIS integration {es-pull}111154[#111154] -// -// Ingest Node:: -// * Add a `terminate` ingest processor {es-pull}114157[#114157] (issue: {es-issue}110218[#110218]) -// -// Machine Learning:: -// * Inference autoscaling {es-pull}109667[#109667] -// * Telemetry for inference adaptive allocations {es-pull}110630[#110630] -// -// Relevance:: -// * [Query rules] Add `exclude` query rule type {es-pull}111420[#111420] -// -// Search:: -// * Async search: Add ID and "is running" http headers {es-pull}112431[#112431] (issue: {es-issue}109576[#109576]) -// * Cross-cluster search telemetry {es-pull}113825[#113825] -// -// Vector Search:: -// * Adding new bbq index types behind a feature flag {es-pull}114439[#114439] + +Aggregations:: +* Handle with `illegalArgumentExceptions` negative values in HDR percentile aggregations {es-pull}116174[#116174] (issue: {es-issue}115777[#115777]) + +Analysis:: +* Adjust analyze limit exception to be a `bad_request` {es-pull}116325[#116325] + +CCS:: +* Fix long metric deserialize & add - auto-resize needs to be set manually {es-pull}117105[#117105] (issue: {es-issue}116914[#116914]) + +CRUD:: +* Preserve thread context when waiting for segment generation in RTG {es-pull}114623[#114623] +* Standardize error code when bulk body is invalid {es-pull}114869[#114869] + +Data streams:: +* Add missing header in `put_data_lifecycle` rest-api-spec {es-pull}116292[#116292] + +EQL:: +* Don't use a `BytesStreamOutput` to copy keys in `BytesRefBlockHash` {es-pull}114819[#114819] (issue: {es-issue}114599[#114599]) + +ES|QL:: +* Added stricter range type checks and runtime warnings for ENRICH {es-pull}115091[#115091] (issues: {es-issue}107357[#107357], {es-issue}116799[#116799]) +* Don't return TEXT type for functions that take TEXT {es-pull}114334[#114334] (issues: {es-issue}111537[#111537], {es-issue}114333[#114333]) +* ESQL: Fix sorts containing `_source` {es-pull}116980[#116980] (issue: {es-issue}116659[#116659]) +* ESQL: fix the column position in errors {es-pull}117153[#117153] +* ES|QL: Fix stats by constant 
expression {es-pull}114899[#114899]
+* Fix NPE in `EnrichLookupService` on mixed clusters with <8.14 versions {es-pull}116583[#116583] (issues: {es-issue}116529[#116529], {es-issue}116544[#116544])
+* Fix TDigestState.read CB leaks {es-pull}114303[#114303] (issue: {es-issue}114194[#114194])
+* Fixing remote ENRICH by pushing the Enrich inside `FragmentExec` {es-pull}114665[#114665] (issue: {es-issue}105095[#105095])
+* Use `SearchStats` instead of field.isAggregatable in data node planning {es-pull}115744[#115744] (issue: {es-issue}115737[#115737])
+* [ESQL] Fix Binary Comparisons on Date Nanos {es-pull}116346[#116346]
+* [ES|QL] To_DatePeriod and To_TimeDuration return better error messages on `union_type` fields {es-pull}114934[#114934]
+
+Infra/CLI::
+* Fix NPE on plugin sync {es-pull}115640[#115640] (issue: {es-issue}114818[#114818])
+
+Infra/Metrics::
+* Make `randomInstantBetween` always return a value in the range [`minInstant`, `maxInstant`] {es-pull}114177[#114177]
+
+Infra/REST API::
+* Fixed a `NullPointerException` in `_capabilities` API when the `path` parameter is null. {es-pull}113413[#113413] (issue: {es-issue}113413[#113413])
+
+Infra/Settings::
+* Don't allow secure settings in YML config (109115) {es-pull}115779[#115779] (issue: {es-issue}109115[#109115])
+
+Ingest Node::
+* Add warning headers for ingest pipelines containing special characters {es-pull}114837[#114837] (issue: {es-issue}104411[#104411])
+* Reducing error-level stack trace logging for normal events in `GeoIpDownloader` {es-pull}114924[#114924]
+
+Logs::
+* Always check if index mode is logsdb {es-pull}116922[#116922]
+* Prohibit changes to index mode, source, and sort settings during resize {es-pull}115812[#115812]
+
+Machine Learning::
+* Fix bug in ML autoscaling when some node info is unavailable {es-pull}116650[#116650]
+* Fix DeBERTa tokenizer bug caused by a bug in the normalizer {es-pull}117189[#117189]
+* Hides `hugging_face_elser` service from the `GET _inference/_services` API {es-pull}116664[#116664] (issue: {es-issue}116644[#116644])
+* Mitigate IOSession timeouts {es-pull}115414[#115414] (issues: {es-issue}114385[#114385], {es-issue}114327[#114327], {es-issue}114105[#114105], {es-issue}114232[#114232])
+* Propagate scoring function through random sampler {es-pull}116957[#116957] (issue: {es-issue}110134[#110134])
+* Update Deberta tokenizer {es-pull}116358[#116358]
+* Wait for up to 2 seconds for yellow status before starting search {es-pull}115938[#115938] (issues: {es-issue}107777[#107777], {es-issue}105955[#105955], {es-issue}107815[#107815], {es-issue}112191[#112191])
+
+Mapping::
+* Change synthetic source logic for `constant_keyword` {es-pull}117182[#117182] (issue: {es-issue}117083[#117083])
+* Ignore conflicting fields during dynamic mapping update {es-pull}114227[#114227] (issue: {es-issue}114228[#114228])
+
+Network::
+* Use underlying `ByteBuf` `refCount` for `ReleasableBytesReference` {es-pull}116211[#116211]
+
+Ranking::
+* Propagating nested `inner_hits` to the parent compound retriever {es-pull}116408[#116408] (issue: {es-issue}116397[#116397])
+
+Relevance::
+* Fix handling of bulk requests with semantic text fields and delete ops {es-pull}116942[#116942]
+
+Search::
+* Catch and handle disconnect exceptions in search {es-pull}115836[#115836]
+* Fields caps does not honour ignore_unavailable {es-pull}116021[#116021] (issue: {es-issue}107767[#107767])
+* Fix handling of time exceeded exception in fetch phase {es-pull}116676[#116676]
+* Fix leak in `DfsQueryPhase` and introduce search
disconnect stress test {es-pull}116060[#116060] (issue: {es-issue}115056[#115056]) +* Inconsistency in the _analyzer api when the index is not included {es-pull}115930[#115930] +* Semantic text simple partial update {es-pull}116478[#116478] +* Updated Date Range to Follow Documentation When Assuming Missing Values {es-pull}112258[#112258] (issue: {es-issue}111484[#111484]) +* Validate missing shards after the coordinator rewrite {es-pull}116382[#116382] +* _validate does not honour ignore_unavailable {es-pull}116656[#116656] (issue: {es-issue}116594[#116594]) + +Snapshot/Restore:: +* Retry throttled snapshot deletions {es-pull}113237[#113237] + +Vector Search:: +* Update Semantic Query To Handle Zero Size Responses {es-pull}116277[#116277] (issue: {es-issue}116083[#116083]) + +Watcher:: +* Watch Next Run Interval Resets On Shard Move or Node Restart {es-pull}115102[#115102] (issue: {es-issue}111433[#111433]) + +[[deprecation-9.0.0]] +[float] +=== Deprecations + +Ingest Node:: +* Fix `_type` deprecation on simulate pipeline API {es-pull}116259[#116259] + +Machine Learning:: +* [Inference API] Deprecate elser service {es-pull}113216[#113216] + +Mapping:: +* Deprecate `_source.mode` in mappings {es-pull}116689[#116689] + +[[enhancement-9.0.0]] +[float] +=== Enhancements + +Allocation:: +* Only publish desired balance gauges on master {es-pull}115383[#115383] + +Authorization:: +* Add a `monitor_stats` privilege and allow that privilege for remote cluster privileges {es-pull}114964[#114964] +* [Security Solution] Add `create_index` to `kibana_system` role for index/DS `.logs-endpoint.action.responses-*` {es-pull}115241[#115241] + +CRUD:: +* Suppress merge-on-recovery for older indices {es-pull}113462[#113462] + +Data streams:: +* Adding a deprecation info API warning for data streams with old indices {es-pull}116447[#116447] +* Apm-data: disable date_detection for all apm data streams {es-pull}116995[#116995] + +Distributed:: +* Metrics for incremental bulk splits {es-pull}116765[#116765] +* Use Azure blob batch API to delete blobs in batches {es-pull}114566[#114566] + +ES|QL:: +* Add ES|QL `bit_length` function {es-pull}115792[#115792] +* ESQL: Honor skip_unavailable setting for nonmatching indices errors at planning time {es-pull}116348[#116348] (issue: {es-issue}114531[#114531]) +* ESQL: Remove parent from `FieldAttribute` {es-pull}112881[#112881] +* ESQL: extract common filter from aggs {es-pull}115678[#115678] +* ESQL: optimise aggregations filtered by false/null into evals {es-pull}115858[#115858] +* ES|QL CCS uses `skip_unavailable` setting for handling disconnected remote clusters {es-pull}115266[#115266] (issue: {es-issue}114531[#114531]) +* ES|QL: add metrics for functions {es-pull}114620[#114620] +* Esql Enable Date Nanos (tech preview) {es-pull}117080[#117080] +* Support partial sort fields in TopN pushdown {es-pull}116043[#116043] (issue: {es-issue}114515[#114515]) +* [ES|QL] Implicit casting string literal to intervals {es-pull}115814[#115814] (issue: {es-issue}115352[#115352]) + +Health:: +* Increase `replica_unassigned_buffer_time` default from 3s to 5s {es-pull}112834[#112834] + +Indices APIs:: +* Ensure class resource stream is closed in `ResourceUtils` {es-pull}116437[#116437] + +Inference:: +* Add version prefix to Inference Service API path {es-pull}117095[#117095] + +Infra/Circuit Breakers:: +* Add link to Circuit Breaker "Data too large" exception message {es-pull}113561[#113561] + +Infra/Core:: +* Support for unsigned 64 bit numbers in Cpu stats {es-pull}114681[#114681] 
(issue: {es-issue}112274[#112274])
+
+Infra/Metrics::
+* Add `ensureGreen` test method for use with `adminClient` {es-pull}113425[#113425]
+
+Infra/Scripting::
+* Add a `mustache.max_output_size_bytes` setting to limit the length of results from mustache scripts {es-pull}114002[#114002]
+
+Ingest Node::
+* Add postal_code support to the City and Enterprise databases {es-pull}114193[#114193]
+* Add support for registered country fields for maxmind geoip databases {es-pull}114521[#114521]
+* Adding support for additional mapping to simulate ingest API {es-pull}114742[#114742]
+* Adding support for simulate ingest mapping addition for indices with mappings that do not come from templates {es-pull}115359[#115359]
+* Support IPinfo database configurations {es-pull}114548[#114548]
+* Support more maxmind fields in the geoip processor {es-pull}114268[#114268]
+
+Logs::
+* Add logsdb telemetry {es-pull}115994[#115994]
+* Add num docs and size to logsdb telemetry {es-pull}116128[#116128]
+* Feature: re-structure document ID generation favoring _id inverted index compression {es-pull}104683[#104683]
+
+Machine Learning::
+* Add DeBERTa-V2/V3 tokenizer {es-pull}111852[#111852]
+* Add special case for elastic reranker in inference API {es-pull}116962[#116962]
+* Adding inference endpoint validation for `AzureAiStudioService` {es-pull}113713[#113713]
+* Adds support for `input_type` field to Vertex inference service {es-pull}116431[#116431]
+* Enable built-in Inference Endpoints and default for Semantic Text {es-pull}116931[#116931]
+* Increase default `queue_capacity` to 10_000 and decrease max `queue_capacity` to 100_000 {es-pull}115041[#115041]
+* Inference duration and error metrics {es-pull}115876[#115876]
+* Remove all mentions of eis and gateway and deprecate flags that do {es-pull}116692[#116692]
+* [Inference API] Add API to get configuration of inference services {es-pull}114862[#114862]
+* [Inference API] Improve chunked results error message {es-pull}115807[#115807]
+
+Network::
+* Allow http unsafe buffers by default {es-pull}116115[#116115]
+
+Recovery::
+* Attempt to clean up index before remote transfer {es-pull}115142[#115142] (issue: {es-issue}104473[#104473])
+* Trigger merges after recovery {es-pull}113102[#113102]
+
+Reindex::
+* Change Reindexing metrics unit from millis to seconds {es-pull}115721[#115721]
+
+Relevance::
+* Add query rules retriever {es-pull}114855[#114855]
+* Add tracking for query rule types {es-pull}116357[#116357]
+
+Search::
+* Add Search Phase APM metrics {es-pull}113194[#113194]
+* Add `docvalue_fields` Support for `dense_vector` Fields {es-pull}114484[#114484] (issue: {es-issue}108470[#108470])
+* Add initial support for `semantic_text` field type {es-pull}113920[#113920]
+* Adds access to flags no_sub_matches and no_overlapping_matches to hyphenation-decompounder-tokenfilter {es-pull}115459[#115459] (issue: {es-issue}97849[#97849])
+* Better sizing `BytesRef` for Strings in Queries {es-pull}115655[#115655]
+* Enable `_tier` based coordinator rewrites for all indices (not just mounted indices) {es-pull}115797[#115797]
+* Only aggregations require at least one shard request {es-pull}115314[#115314]
+
+Security::
+* Add refresh `.security` index call between security migrations {es-pull}114879[#114879]
+
+Snapshot/Restore::
+* Improve message about insecure S3 settings {es-pull}116915[#116915]
+* Retry `S3BlobContainer#getRegister` on all exceptions {es-pull}114813[#114813]
+* Split searchable snapshot into multiple repo operations {es-pull}116918[#116918]
+* Track shard snapshot progress during node shutdown {es-pull}112567[#112567]
+
+Vector Search::
+* Add support for bitwise inner-product in painless {es-pull}116082[#116082]
+
+[[feature-9.0.0]]
+[float]
+=== New features
+
+Data streams::
+* Add default ILM policies and switch to ILM for apm-data plugin {es-pull}115687[#115687]
+
+ES|QL::
+* Add support for `BYTE_LENGTH` scalar function {es-pull}116591[#116591]
+* Esql/lookup join grammar {es-pull}116515[#116515]
+* Remove snapshot build restriction for match and qstr functions {es-pull}114482[#114482]
+
+Search::
+* ESQL - Add match operator (:) {es-pull}116819[#116819]

 [[upgrade-9.0.0]]
 [float]
 === Upgrades
-//
-// Infra/Core::
-// * Upgrade xcontent to Jackson 2.17.0 {es-pull}111948[#111948]
-// * Upgrade xcontent to Jackson 2.17.2 {es-pull}112320[#112320]
-//
-// Infra/Metrics::
-// * Update APM Java Agent to support JDK 23 {es-pull}115194[#115194] (issues: {es-issue}115101[#115101], {es-issue}115100[#115100])
-//
-// Search::
-// * Upgrade to Lucene 10 {es-pull}114741[#114741]
-// * Upgrade to Lucene 9.12 {es-pull}113333[#113333]
-//
-// Snapshot/Restore::
-// * Upgrade Azure SDK {es-pull}111225[#111225]
-// * Upgrade `repository-azure` dependencies {es-pull}112277[#112277]
+
+Search::
+* Upgrade to Lucene 10 {es-pull}114741[#114741]

diff --git a/docs/reference/release-notes/highlights.asciidoc b/docs/reference/release-notes/highlights.asciidoc
index edecd4f727583..b87081639c684 100644
--- a/docs/reference/release-notes/highlights.asciidoc
+++ b/docs/reference/release-notes/highlights.asciidoc
@@ -1,6 +1,3 @@
-// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.
-// The content generated here is not correct and most of it has been manually commented out until it can be fixed.
-// See ES-9931 for more details.
 [[release-highlights]]
 == What's new in {minor-version}

@@ -12,163 +9,14 @@ For detailed information about this release, see the <<es-release-notes>> and
 <<breaking-changes>>.
 endif::[]
-//
-// // tag::notable-highlights[]
-//
-// [discrete]
-// [[esql_inlinestats]]
-// === ESQL: INLINESTATS
-// This adds the `INLINESTATS` command to ESQL which performs a STATS and
-// then enriches the results into the output stream. So, this query:
-//
-// [source,esql]
-// ----
-// FROM test
-// | INLINESTATS m=MAX(a * b) BY b
-// | WHERE m == a * b
-// | SORT a DESC, b DESC
-// | LIMIT 3
-// ----
-//
-// Produces output like:
-//
-// | a   | b   | m     |
-// | --- | --- | ----- |
-// | 99  | 999 | 98901 |
-// | 99  | 998 | 98802 |
-// | 99  | 997 | 98703 |
-//
-// {es-pull}109583[#109583]
-//
-// [discrete]
-// [[always_allow_rebalancing_by_default]]
-// === Always allow rebalancing by default
-// In earlier versions of {es} the `cluster.routing.allocation.allow_rebalance` setting defaults to
-// `indices_all_active` which blocks all rebalancing moves while the cluster is in `yellow` or `red` health. This was
-// appropriate for the legacy allocator which might do too many rebalancing moves otherwise. Today's allocator has
-// better support for rebalancing a cluster that is not in `green` health, and expects to be able to rebalance some
-// shards away from over-full nodes to avoid allocating shards to undesirable locations in the first place. From
-// version 8.16 the `allow_rebalance` setting defaults to `always` unless the legacy allocator is explicitly enabled.
-//
-// {es-pull}111015[#111015]
-//
-// [discrete]
-// [[add_global_retention_in_data_stream_lifecycle]]
-// === Add global retention in data stream lifecycle
-// Data stream lifecycle now supports configuring retention on a cluster level,
-// namely global retention. Global retention allows us to configure two different
-// retentions:
-//
-// - `data_streams.lifecycle.retention.default` is applied to all data streams managed
-// by the data stream lifecycle that do not have retention defined on the data stream level.
-// - `data_streams.lifecycle.retention.max` is applied to all data streams managed by the
-// data stream lifecycle and it allows any data stream data to be deleted after the `max_retention` has passed.
-//
-// {es-pull}111972[#111972]
-//
-// [discrete]
-// [[enable_zstandard_compression_for_indices_with_index_codec_set_to_best_compression]]
-// === Enable ZStandard compression for indices with index.codec set to best_compression
-// Previously, DEFLATE compression was used to compress stored fields in indices with the index.codec index setting
-// set to best_compression; with this change, ZStandard is used as the compression algorithm for stored fields in
-// indices with the index.codec index setting set to best_compression. The usage of ZStandard results in less storage
-// usage with a similar indexing throughput, depending on what options are used. Experiments with indexing logs have
-// shown that ZStandard offers ~12% lower storage usage and a ~14% higher indexing throughput compared to DEFLATE.
-//
-// {es-pull}112665[#112665]
-//
-// [discrete]
-// [[esql_introduce_per_agg_filter]]
-// === ESQL: Introduce per agg filter
-// Add support for aggregation scoped filters that work dynamically on the
-// data in each group.
-//
-// [source,esql]
-// ----
-// | STATS success = COUNT(*) WHERE 200 <= code AND code < 300,
-//         redirect = COUNT(*) WHERE 300 <= code AND code < 400,
-//         client_err = COUNT(*) WHERE 400 <= code AND code < 500,
-//         server_err = COUNT(*) WHERE 500 <= code AND code < 600,
-//         total_count = COUNT(*)
-// ----
-//
-// Implementation-wise, the base AggregateFunction has been extended to
-// allow a filter to be passed on. This is required to incorporate the
-// filter as part of the aggregate equality/identity, which would otherwise fail
-// with the filter as an external component.
-// As part of the process, the serialization for the existing aggregations
-// had to be fixed so that AggregateFunction implementations
-// delegate to their parent first.
-//
-// {es-pull}113735[#113735]
-//
-// // end::notable-highlights[]
-//
-//
-// [discrete]
-// [[esql_multi_value_fields_supported_in_geospatial_predicates]]
-// === ESQL: Multi-value fields supported in Geospatial predicates
-// Supporting multi-value fields in `WHERE` predicates is a challenge due to not knowing whether `ALL` or `ANY`
-// of the values in the field should pass the predicate.
-// For example, should the field `age:[10,30]` pass the predicate `WHERE age>20` or not?
-// This ambiguity does not exist with the spatial predicates
-// `ST_INTERSECTS` and `ST_DISJOINT`, because the choice between `ANY` or `ALL`
-// is implied by the predicate itself.
-// Consider a predicate checking a field named `location` against a test geometry named `shape`:
-//
-// * `ST_INTERSECTS(field, shape)` - true if `ANY` value can intersect the shape
-// * `ST_DISJOINT(field, shape)` - true only if `ALL` values are disjoint from the shape
-//
-// This works even if the shape argument is itself a complex or compound geometry.
-//
-// Similar logic exists for `ST_CONTAINS` and `ST_WITHIN` predicates, but these are not as easily solved
-// with `ANY` or `ALL`, because a collection of geometries contains another collection if each of the contained
-// geometries is within at least one of the containing geometries. Evaluating this requires that the multi-value
-// field is first combined into a single geometry before performing the predicate check.
-//
-// * `ST_CONTAINS(field, shape)` - true if the combined geometry contains the shape
-// * `ST_WITHIN(field, shape)` - true if the combined geometry is within the shape
-//
-// {es-pull}112063[#112063]
-//
-// [discrete]
-// [[enhance_sort_push_down_to_lucene_to_cover_references_to_fields_st_distance_function]]
-// === Enhance SORT push-down to Lucene to cover references to fields and ST_DISTANCE function
-// The most used and likely most valuable geospatial search query in Elasticsearch is the sorted proximity search,
-// finding items within a certain distance of a point of interest and sorting the results by distance.
-// This has been possible in ES|QL since 8.15.0, but the sorting was done in-memory, not pushed down to Lucene.
-// Now the sorting is pushed down to Lucene, which results in a significant performance improvement.
-//
-// Queries that perform both filtering and sorting on distance are supported. For example:
-//
-// [source,esql]
-// ----
-// FROM test
-// | EVAL distance = ST_DISTANCE(location, TO_GEOPOINT("POINT(37.7749, -122.4194)"))
-// | WHERE distance < 1000000
-// | SORT distance ASC, name DESC
-// | LIMIT 10
-// ----
-//
-// In addition, the support for sorting on EVAL expressions has been extended to cover references to fields:
-//
-// [source,esql]
-// ----
-// FROM test
-// | EVAL ref = field
-// | SORT ref ASC
-// | LIMIT 10
-// ----
-//
-// {es-pull}112938[#112938]
-//
+
+// The notable-highlights tag marks entries that
+// should be featured in the Stack Installation and Upgrade Guide:
+// tag::notable-highlights[]
 // [discrete]
-// [[cross_cluster_search_telemetry]]
-// === Cross-cluster search telemetry
-// Cross-cluster search telemetry is collected when cross-cluster searches
-// are performed, and is returned as the "ccs" field in the `_cluster/stats` output.
-// It also adds a new parameter `include_remotes=true` to the `_cluster/stats` API
-// which will collect data from connected remote clusters.
+// === Heading
 //
-// {es-pull}113825[#113825]
+// Description.
+// end::notable-highlights[] + From 573b8a9252e55e24c1c34e9e734b37aafd077e83 Mon Sep 17 00:00:00 2001 From: Dan Rubinstein Date: Thu, 21 Nov 2024 14:47:24 -0500 Subject: [PATCH 13/50] Adding chunking settings to IbmWatsonxService (#114914) * Adding chunking settings to IbmWatsonxService * Removing feature flag * Update docs/changelog/114914.yaml --------- Co-authored-by: Elastic Machine --- docs/changelog/114914.yaml | 5 + .../ibmwatsonx/IbmWatsonxService.java | 29 ++- .../embeddings/IbmWatsonxEmbeddingsModel.java | 6 +- .../ibmwatsonx/IbmWatsonxServiceTests.java | 173 +++++++++++++++++- .../IbmWatsonxEmbeddingsModelTests.java | 1 + 5 files changed, 211 insertions(+), 3 deletions(-) create mode 100644 docs/changelog/114914.yaml diff --git a/docs/changelog/114914.yaml b/docs/changelog/114914.yaml new file mode 100644 index 0000000000000..bad13e26682dc --- /dev/null +++ b/docs/changelog/114914.yaml @@ -0,0 +1,5 @@ +pr: 114914 +summary: Adding chunking settings to `IbmWatsonxService` +area: Machine Learning +type: enhancement +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index e960b0b777f2b..f4f4605c667c3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptySettingsConfiguration; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -30,6 +31,7 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationDisplayType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.inference.chunking.ChunkingSettingsBuilder; import org.elasticsearch.xpack.inference.chunking.EmbeddingRequestChunker; import org.elasticsearch.xpack.inference.external.action.ibmwatsonx.IbmWatsonxActionCreator; import org.elasticsearch.xpack.inference.external.http.sender.DocumentsOnlyInput; @@ -86,11 +88,19 @@ public void parseRequestConfig( Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap( + removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS) + ); + } + IbmWatsonxModel model = createModel( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, serviceSettingsMap, TaskType.unsupportedTaskTypeErrorMsg(taskType, NAME), ConfigurationParseContext.REQUEST @@ -112,6 +122,7 @@ private static IbmWatsonxModel createModel( TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, @Nullable Map secretSettings, String failureMessage, ConfigurationParseContext context @@ -123,6 +134,7 @@ private static 
IbmWatsonxModel createModel( NAME, serviceSettings, taskSettings, + chunkingSettings, secretSettings, context ); @@ -141,11 +153,17 @@ public IbmWatsonxModel parsePersistedConfigWithSecrets( Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); Map secretSettingsMap = removeFromMapOrDefaultEmpty(secrets, ModelSecrets.SECRET_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, secretSettingsMap, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -166,6 +184,7 @@ private static IbmWatsonxModel createModelFromPersistent( TaskType taskType, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secretSettings, String failureMessage ) { @@ -174,6 +193,7 @@ private static IbmWatsonxModel createModelFromPersistent( taskType, serviceSettings, taskSettings, + chunkingSettings, secretSettings, failureMessage, ConfigurationParseContext.PERSISTENT @@ -185,11 +205,17 @@ public Model parsePersistedConfig(String inferenceEntityId, TaskType taskType, M Map serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS); Map taskSettingsMap = removeFromMapOrDefaultEmpty(config, ModelConfigurations.TASK_SETTINGS); + ChunkingSettings chunkingSettings = null; + if (TaskType.TEXT_EMBEDDING.equals(taskType)) { + chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMapOrDefaultEmpty(config, ModelConfigurations.CHUNKING_SETTINGS)); + } + return createModelFromPersistent( inferenceEntityId, taskType, serviceSettingsMap, taskSettingsMap, + chunkingSettings, null, parsePersistedConfigErrorMsg(inferenceEntityId, NAME) ); @@ -266,7 +292,8 @@ protected void doChunkedInfer( var batchedRequests = new EmbeddingRequestChunker( input.getInputs(), EMBEDDING_MAX_BATCH_SIZE, - EmbeddingRequestChunker.EmbeddingType.FLOAT + EmbeddingRequestChunker.EmbeddingType.FLOAT, + model.getConfigurations().getChunkingSettings() ).batchRequestsWithListeners(listener); for (var request : batchedRequests) { var action = ibmWatsonxModel.accept(getActionCreator(getSender(), getServiceComponents()), taskSettings, inputType); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java index d60e31b5d41c0..6b20e07ecc0a2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModel.java @@ -9,6 +9,7 @@ import org.apache.http.client.utils.URIBuilder; import org.elasticsearch.core.Nullable; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InputType; import org.elasticsearch.inference.ModelConfigurations; @@ -40,6 +41,7 @@ public IbmWatsonxEmbeddingsModel( String service, Map serviceSettings, Map taskSettings, + ChunkingSettings chunkingSettings, Map secrets, ConfigurationParseContext context ) { @@ -49,6 +51,7 
@@ public IbmWatsonxEmbeddingsModel( service, IbmWatsonxEmbeddingsServiceSettings.fromMap(serviceSettings, context), EmptyTaskSettings.INSTANCE, + chunkingSettings, DefaultSecretSettings.fromMap(secrets) ); } @@ -64,10 +67,11 @@ public IbmWatsonxEmbeddingsModel(IbmWatsonxEmbeddingsModel model, IbmWatsonxEmbe String service, IbmWatsonxEmbeddingsServiceSettings serviceSettings, TaskSettings taskSettings, + ChunkingSettings chunkingsettings, @Nullable DefaultSecretSettings secrets ) { super( - new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings), + new ModelConfigurations(inferenceEntityId, taskType, service, serviceSettings, taskSettings, chunkingsettings), new ModelSecrets(secrets), serviceSettings ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index d6c491f2b7cec..f7f37c5bcd15f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.ChunkedInferenceServiceResults; import org.elasticsearch.inference.ChunkingOptions; +import org.elasticsearch.inference.ChunkingSettings; import org.elasticsearch.inference.EmptyTaskSettings; import org.elasticsearch.inference.InferenceServiceConfiguration; import org.elasticsearch.inference.InferenceServiceResults; @@ -69,6 +70,8 @@ import static org.elasticsearch.xpack.inference.Utils.getPersistedConfigMap; import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettings; +import static org.elasticsearch.xpack.inference.chunking.ChunkingSettingsTests.createRandomChunkingSettingsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; @@ -124,6 +127,7 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); service.parseRequestConfig( @@ -150,6 +154,45 @@ public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModel() throws IO } } + public void testParseRequestConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + ActionListener modelListener = ActionListener.wrap(model -> { + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + 
assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + }, e -> fail("Model parsing should have succeeded, but failed: " + e.getMessage())); + + service.parseRequestConfig( + "id", + TaskType.TEXT_EMBEDDING, + getRequestConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + new HashMap<>(Map.of()), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ), + modelListener + ); + } + } + public void testParseRequestConfig_ThrowsUnsupportedModelType() throws IOException { try (var service = createIbmWatsonxService()) { var failureListener = getModelListenerForException( @@ -235,6 +278,47 @@ public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsMode assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfigWithSecrets_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + getSecretSettingsMap(apiKey) + ); + + var model = service.parsePersistedConfigWithSecrets( + "id", + TaskType.TEXT_EMBEDDING, + persistedConfig.config(), + persistedConfig.secrets() + ); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getSecretSettings().apiKey().toString(), is(apiKey)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); } } @@ -399,6 +483,73 @@ public void testParsePersistedConfigWithSecrets_DoesNotThrowWhenAnExtraKeyExists } } + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsNotProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + 
getTaskSettingsMapEmpty(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + + public void testParsePersistedConfig_CreatesAIbmWatsonxEmbeddingsModelWhenChunkingSettingsProvided() throws IOException { + try (var service = createIbmWatsonxService()) { + var persistedConfig = getPersistedConfigMap( + new HashMap<>( + Map.of( + ServiceFields.MODEL_ID, + modelId, + IbmWatsonxServiceFields.PROJECT_ID, + projectId, + ServiceFields.URL, + url, + IbmWatsonxServiceFields.API_VERSION, + apiVersion + ) + ), + getTaskSettingsMapEmpty(), + createRandomChunkingSettingsMap(), + null + ); + + var model = service.parsePersistedConfig("id", TaskType.TEXT_EMBEDDING, persistedConfig.config()); + + assertThat(model, instanceOf(IbmWatsonxEmbeddingsModel.class)); + + var embeddingsModel = (IbmWatsonxEmbeddingsModel) model; + assertThat(embeddingsModel.getServiceSettings().modelId(), is(modelId)); + assertThat(embeddingsModel.getServiceSettings().projectId(), is(projectId)); + assertThat(embeddingsModel.getServiceSettings().url(), is(URI.create(url))); + assertThat(embeddingsModel.getServiceSettings().apiVersion(), is(apiVersion)); + assertThat(embeddingsModel.getTaskSettings(), is(EmptyTaskSettings.INSTANCE)); + assertThat(embeddingsModel.getConfigurations().getChunkingSettings(), instanceOf(ChunkingSettings.class)); + } + } + public void testInfer_ThrowsErrorWhenModelIsNotIbmWatsonxModel() throws IOException { var sender = mock(Sender.class); @@ -488,7 +639,15 @@ public void testInfer_SendsEmbeddingsRequest() throws IOException { } } - public void testChunkedInfer_Batches() throws IOException { + public void testChunkedInfer_ChunkingSettingsNotSet() throws IOException { + testChunkedInfer_Batches(null); + } + + public void testChunkedInfer_ChunkingSettingsSet() throws IOException { + testChunkedInfer_Batches(createRandomChunkingSettings()); + } + + private void testChunkedInfer_Batches(ChunkingSettings chunkingSettings) throws IOException { var input = List.of("foo", "bar"); var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); @@ -878,6 +1037,18 @@ private static ActionListener getModelListenerForException(Class excep }); } + private Map getRequestConfigMap( + Map serviceSettings, + Map taskSettings, + Map chunkingSettings, + Map secretSettings + ) { + var requestConfigMap = getRequestConfigMap(serviceSettings, taskSettings, secretSettings); + requestConfigMap.put(ModelConfigurations.CHUNKING_SETTINGS, chunkingSettings); + + return requestConfigMap; + } + private Map getRequestConfigMap( Map serviceSettings, Map taskSettings, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java index 93fd7e402a0de..33fcd752fbf30 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/embeddings/IbmWatsonxEmbeddingsModelTests.java @@ -82,6 +82,7 @@ public static IbmWatsonxEmbeddingsModel createModel( null ), EmptyTaskSettings.INSTANCE, + null, new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } From 6d963d324aa71a514cd4baa01bd0805eaba7426e Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 21 Nov 2024 12:50:32 -0800 Subject: [PATCH 14/50] Limit thread queue during init in ExchangeSource (#117273) ES|QL doesn't work well with 500 clusters or clusters with 500 nodes. The reason is that we enqueue three tasks to the thread pool queue, which has a limit of 1000, during the initialization of the exchange for each target (cluster or node). This simple PR reduces it to one task. I'm considering using AsyncProcessor for these requests, but that will be a follow-up issue for later. --- .../exchange/ExchangeSourceHandler.java | 23 ++++++++++--------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java index e3fc0e26e34e0..4baaf9ad89bd6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeSourceHandler.java @@ -220,20 +220,21 @@ void onSinkComplete() { * @see ExchangeSinkHandler#fetchPageAsync(boolean, ActionListener) */ public void addRemoteSink(RemoteSink remoteSink, int instances) { - for (int i = 0; i < instances; i++) { - var fetcher = new RemoteSinkFetcher(remoteSink); - fetchExecutor.execute(new AbstractRunnable() { - @Override - public void onFailure(Exception e) { - fetcher.onSinkFailed(e); - } + fetchExecutor.execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + failure.unwrapAndCollect(e); + buffer.waitForReading().listener().onResponse(null); // resume the Driver if it is being blocked on reading + } - @Override - protected void doRun() { + @Override + protected void doRun() { + for (int i = 0; i < instances; i++) { + var fetcher = new RemoteSinkFetcher(remoteSink); fetcher.fetchPage(); } - }); - } + } + }); } /** From 8e6e087ecc3ec883430b152622deab728f3f64bd Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Thu, 21 Nov 2024 22:09:03 +0100 Subject: [PATCH 15/50] Remove StringQueryPredicate (#117134) * Remove StringQueryPredicate * Fix tests --- .../fulltext/StringQueryPredicate.java | 62 ------------------- .../core/planner/ExpressionTranslators.java | 14 ----- .../core/querydsl/query/QueryStringQuery.java | 16 ++--- .../querydsl/query/QueryStringQueryTests.java | 20 +++--- .../function/fulltext/FullTextWritables.java | 9 +-- .../physical/local/PushFiltersToSource.java | 3 - .../planner/EsqlExpressionTranslators.java | 3 +- .../StringQuerySerializationTests.java | 34 ---------- 8 files changed, 15 insertions(+), 146 deletions(-) delete mode 100644 
x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java
 delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java

diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java
deleted file mode 100644
index 95000a5364e12..0000000000000
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/fulltext/StringQueryPredicate.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-package org.elasticsearch.xpack.esql.core.expression.predicate.fulltext;
-
-import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.xpack.esql.core.expression.Expression;
-import org.elasticsearch.xpack.esql.core.tree.NodeInfo;
-import org.elasticsearch.xpack.esql.core.tree.Source;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-import static java.util.Collections.emptyList;
-
-public final class StringQueryPredicate extends FullTextPredicate {
-
-    public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(
-        Expression.class,
-        "StringQueryPredicate",
-        StringQueryPredicate::new
-    );
-
-    private final Map<String, Float> fields;
-
-    public StringQueryPredicate(Source source, String query, String options) {
-        super(source, query, options, emptyList());
-
-        // inferred
-        this.fields = FullTextUtils.parseFields(optionMap(), source);
-    }
-
-    StringQueryPredicate(StreamInput in) throws IOException {
-        super(in);
-        assert super.children().isEmpty();
-        this.fields = FullTextUtils.parseFields(optionMap(), source());
-    }
-
-    @Override
-    protected NodeInfo<StringQueryPredicate> info() {
-        return NodeInfo.create(this, StringQueryPredicate::new, query(), options());
-    }
-
-    @Override
-    public Expression replaceChildren(List<Expression> newChildren) {
-        throw new UnsupportedOperationException("this type of node doesn't have any children to replace");
-    }
-
-    public Map<String, Float> fields() {
-        return fields;
-    }
-
-    @Override
-    public String getWriteableName() {
-        return ENTRY.name;
-    }
-}
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java
index b6383fac33299..7836522c77130 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java
@@ -12,7 +12,6 @@
 import org.elasticsearch.xpack.esql.core.expression.FieldAttribute;
 import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute;
 import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate;
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;
 import
org.elasticsearch.xpack.esql.core.expression.predicate.logical.And;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or;
@@ -26,7 +25,6 @@
 import org.elasticsearch.xpack.esql.core.querydsl.query.MultiMatchQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.Query;
-import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.RegexQuery;
 import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery;
 import org.elasticsearch.xpack.esql.core.tree.Source;
@@ -73,18 +71,6 @@ private static Query translateField(RegexMatch e, String targetFieldName) {
         }
     }

-    public static class StringQueries extends ExpressionTranslator<StringQueryPredicate> {
-
-        @Override
-        protected Query asQuery(StringQueryPredicate q, TranslatorHandler handler) {
-            return doTranslate(q, handler);
-        }
-
-        public static Query doTranslate(StringQueryPredicate q, TranslatorHandler handler) {
-            return new QueryStringQuery(q.source(), q.query(), q.fields(), q);
-        }
-    }
-
     public static class MultiMatches extends ExpressionTranslator<MultiMatchQueryPredicate> {

         @Override
diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java
index 8ac90e6314174..8dcb87749ae48 100644
--- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java
+++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQuery.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.QueryStringQueryBuilder;
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;
 import org.elasticsearch.xpack.esql.core.tree.Source;

 import java.util.Collections;
@@ -55,20 +54,13 @@ public class QueryStringQuery extends Query {

     private final String query;
     private final Map<String, Float> fields;
-    private StringQueryPredicate predicate;
     private final Map<String, String> options;

-    // dedicated constructor for QueryTranslator
-    public QueryStringQuery(Source source, String query, String fieldName) {
-        this(source, query, Collections.singletonMap(fieldName, Float.valueOf(1.0f)), null);
-    }
-
-    public QueryStringQuery(Source source, String query, Map<String, Float> fields, StringQueryPredicate predicate) {
+    public QueryStringQuery(Source source, String query, Map<String, Float> fields, Map<String, String> options) {
         super(source);
         this.query = query;
         this.fields = fields;
-        this.predicate = predicate;
-        this.options = predicate == null ? Collections.emptyMap() : predicate.optionMap();
+        this.options = options == null ?
Collections.emptyMap() : options; } @Override @@ -95,7 +87,7 @@ public String query() { @Override public int hashCode() { - return Objects.hash(query, fields, predicate); + return Objects.hash(query, fields); } @Override @@ -109,7 +101,7 @@ public boolean equals(Object obj) { } QueryStringQuery other = (QueryStringQuery) obj; - return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(predicate, other.predicate); + return Objects.equals(query, other.query) && Objects.equals(fields, other.fields) && Objects.equals(options, other.options); } @Override diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java index 0f80011961092..22e7b93e84ce1 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/querydsl/query/QueryStringQueryTests.java @@ -10,42 +10,40 @@ import org.elasticsearch.index.query.Operator; import org.elasticsearch.index.query.QueryStringQueryBuilder; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.StringUtils; import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.equalTo; public class QueryStringQueryTests extends ESTestCase { public void testQueryBuilding() { - QueryStringQueryBuilder qb = getBuilder("lenient=true"); + QueryStringQueryBuilder qb = getBuilder(Map.of("lenient", "true")); assertThat(qb.lenient(), equalTo(true)); - qb = getBuilder("lenient=true;default_operator=AND"); + qb = getBuilder(Map.of("lenient", "true", "default_operator", "AND")); assertThat(qb.lenient(), equalTo(true)); assertThat(qb.defaultOperator(), equalTo(Operator.AND)); - Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder("pizza=yummy")); + Exception e = expectThrows(IllegalArgumentException.class, () -> getBuilder(Map.of("pizza", "yummy"))); assertThat(e.getMessage(), equalTo("illegal query_string option [pizza]")); - e = expectThrows(ElasticsearchParseException.class, () -> getBuilder("type=aoeu")); + e = expectThrows(ElasticsearchParseException.class, () -> getBuilder(Map.of("type", "aoeu"))); assertThat(e.getMessage(), equalTo("failed to parse [multi_match] query type [aoeu]. 
unknown type."));
     }

-    private static QueryStringQueryBuilder getBuilder(String options) {
+    private static QueryStringQueryBuilder getBuilder(Map<String, String> options) {
         final Source source = new Source(1, 1, StringUtils.EMPTY);
-        final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", options);
-        final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp);
-        return (QueryStringQueryBuilder) mmq.asBuilder();
+        final QueryStringQuery query = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), options);
+        return (QueryStringQueryBuilder) query.asBuilder();
     }

     public void testToString() {
         final Source source = new Source(1, 1, StringUtils.EMPTY);
-        final StringQueryPredicate mmqp = new StringQueryPredicate(source, "eggplant", "");
-        final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), mmqp);
+        final QueryStringQuery mmq = new QueryStringQuery(source, "eggplant", Collections.singletonMap("foo", 1.0f), Map.of());
         assertEquals("QueryStringQuery@1:2[{foo=1.0}:eggplant]", mmq.toString());
     }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java
index 7fdfb4b328869..d59c736783172 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextWritables.java
@@ -10,19 +10,12 @@
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MatchQueryPredicate;
 import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.MultiMatchQueryPredicate;
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;

 import java.util.List;

 public class FullTextWritables {

     public static List<NamedWriteableRegistry.Entry> getNamedWriteables() {
-        return List.of(
-            MatchQueryPredicate.ENTRY,
-            MultiMatchQueryPredicate.ENTRY,
-            StringQueryPredicate.ENTRY,
-            QueryString.ENTRY,
-            Match.ENTRY
-        );
+        return List.of(MatchQueryPredicate.ENTRY, MultiMatchQueryPredicate.ENTRY, QueryString.ENTRY, Match.ENTRY);
     }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java
index f01e7c4b1f3a6..9f574ee8005b2 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java
@@ -19,7 +19,6 @@
 import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates;
 import org.elasticsearch.xpack.esql.core.expression.predicate.Range;
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic;
 import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not;
 import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull;
@@ -253,8 +252,6 @@ static
boolean canPushToSource(Expression exp, LucenePushdownPredicates lucenePu && Expressions.foldable(cidrMatch.matches()); } else if (exp instanceof SpatialRelatesFunction spatial) { return canPushSpatialFunctionToSource(spatial, lucenePushdownPredicates); - } else if (exp instanceof StringQueryPredicate) { - return true; } else if (exp instanceof QueryString) { return true; } else if (exp instanceof Match mf) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java index 12dc77e6e7c59..6fac7bab2bd80 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java @@ -86,7 +86,6 @@ public final class EsqlExpressionTranslators { new ExpressionTranslators.IsNotNulls(), new ExpressionTranslators.Nots(), new ExpressionTranslators.Likes(), - new ExpressionTranslators.StringQueries(), new ExpressionTranslators.MultiMatches(), new MatchFunctionTranslator(), new QueryStringFunctionTranslator(), @@ -536,7 +535,7 @@ protected Query asQuery(Match match, TranslatorHandler handler) { public static class QueryStringFunctionTranslator extends ExpressionTranslator { @Override protected Query asQuery(QueryString queryString, TranslatorHandler handler) { - return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), null); + return new QueryStringQuery(queryString.source(), queryString.queryAsText(), Map.of(), Map.of()); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java deleted file mode 100644 index ff00a161e1bb1..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/fulltext/StringQuerySerializationTests.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */
-
-package org.elasticsearch.xpack.esql.expression.predicate.operator.fulltext;
-
-import org.elasticsearch.xpack.esql.core.expression.predicate.fulltext.StringQueryPredicate;
-
-import java.io.IOException;
-
-public class StringQuerySerializationTests extends AbstractFulltextSerializationTests<StringQueryPredicate> {
-
-    private static final String COMMA = ",";
-
-    @Override
-    protected final StringQueryPredicate createTestInstance() {
-        return new StringQueryPredicate(randomSource(), randomAlphaOfLength(randomIntBetween(1, 16)), randomOptionOrNull());
-    }
-
-    @Override
-    protected StringQueryPredicate mutateInstance(StringQueryPredicate instance) throws IOException {
-        var query = instance.query();
-        var options = instance.options();
-        if (randomBoolean()) {
-            query = randomValueOtherThan(query, () -> randomAlphaOfLength(randomIntBetween(1, 16)));
-        } else {
-            options = randomValueOtherThan(options, this::randomOptionOrNull);
-        }
-        return new StringQueryPredicate(instance.source(), query, options);
-    }
-}

From a9451df21833b2ea38954ae99fe18eadbd2366d8 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Thu, 21 Nov 2024 13:11:04 -0800
Subject: [PATCH 16/50] Fix SecureSM to allow innocuous threads and
 threadgroups for parallel streams (#117277)

When a parallel stream is opened, the JDK uses an internal fork join pool to
process the stream. This pool is internal to the JDK, so it should always be
allowed to create threads. This commit modifies SecureSM to account for this
innocuous thread group and its threads.

---
 .../org/elasticsearch/secure_sm/SecureSM.java  | 18 +++++++++++++++---
 .../elasticsearch/secure_sm/SecureSMTests.java | 11 +++++++++++
 2 files changed, 26 insertions(+), 3 deletions(-)

diff --git a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java
index 4fd471c529e75..02d0491118dc7 100644
--- a/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java
+++ b/libs/secure-sm/src/main/java/org/elasticsearch/secure_sm/SecureSM.java
@@ -157,7 +157,9 @@ private static void debugThreadGroups(final ThreadGroup caller, final ThreadGrou
     // Returns true if the given thread is an instance of the JDK's InnocuousThread.
     private static boolean isInnocuousThread(Thread t) {
         final Class<?> c = t.getClass();
-        return c.getModule() == Object.class.getModule() && c.getName().equals("jdk.internal.misc.InnocuousThread");
+        return c.getModule() == Object.class.getModule()
+            && (c.getName().equals("jdk.internal.misc.InnocuousThread")
+                || c.getName().equals("java.util.concurrent.ForkJoinWorkerThread$InnocuousForkJoinWorkerThread"));
     }

     protected void checkThreadAccess(Thread t) {
@@ -184,11 +186,21 @@ protected void checkThreadAccess(Thread t) {
     private static final Permission MODIFY_THREADGROUP_PERMISSION = new RuntimePermission("modifyThreadGroup");
     private static final Permission MODIFY_ARBITRARY_THREADGROUP_PERMISSION = new ThreadPermission("modifyArbitraryThreadGroup");

+    // Returns true if the given thread group is the JDK's innocuous fork join worker thread group.
+    private static boolean isInnocuousThreadGroup(ThreadGroup t) {
+        final Class<?> c = t.getClass();
+        return c.getModule() == Object.class.getModule() && t.getName().equals("InnocuousForkJoinWorkerThreadGroup");
+    }
+
     protected void checkThreadGroupAccess(ThreadGroup g) {
         Objects.requireNonNull(g);

+        boolean targetThreadGroupIsInnocuous = isInnocuousThreadGroup(g);
+
         // first, check if we can modify thread groups at all.
- checkPermission(MODIFY_THREADGROUP_PERMISSION); + if (targetThreadGroupIsInnocuous == false) { + checkPermission(MODIFY_THREADGROUP_PERMISSION); + } // check the threadgroup, if its our thread group or an ancestor, its fine. final ThreadGroup source = Thread.currentThread().getThreadGroup(); @@ -196,7 +208,7 @@ protected void checkThreadGroupAccess(ThreadGroup g) { if (source == null) { return; // we are a dead thread, do nothing - } else if (source.parentOf(target) == false) { + } else if (source.parentOf(target) == false && targetThreadGroupIsInnocuous == false) { checkPermission(MODIFY_ARBITRARY_THREADGROUP_PERMISSION); } } diff --git a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java index b94639414ffe5..69c6973f57cdf 100644 --- a/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java +++ b/libs/secure-sm/src/test/java/org/elasticsearch/secure_sm/SecureSMTests.java @@ -14,7 +14,10 @@ import java.security.Permission; import java.security.Policy; import java.security.ProtectionDomain; +import java.util.ArrayList; +import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.stream.Collectors; /** Simple tests for SecureSM */ public class SecureSMTests extends TestCase { @@ -128,4 +131,12 @@ public void run() { t1.join(); assertTrue(interrupted1.get()); } + + public void testParallelStreamThreadGroup() throws Exception { + List list = new ArrayList<>(); + for (int i = 0; i < 100; ++i) { + list.add(i); + } + list.parallelStream().collect(Collectors.toSet()); + } } From ec644b10ec28b6eaaa5d658eedeac02dcb7bab5c Mon Sep 17 00:00:00 2001 From: Brian Seeders Date: Thu, 21 Nov 2024 16:17:22 -0500 Subject: [PATCH 17/50] Bump versions after 8.16.1 release --- .buildkite/pipelines/intake.yml | 2 +- .buildkite/pipelines/periodic-packaging.yml | 6 +++--- .buildkite/pipelines/periodic.yml | 10 +++++----- .ci/bwcVersions | 2 +- .ci/snapshotBwcVersions | 2 +- server/src/main/java/org/elasticsearch/Version.java | 1 + .../resources/org/elasticsearch/TransportVersions.csv | 1 + .../org/elasticsearch/index/IndexVersions.csv | 1 + 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 8935872fdec83..5be5990cfb203 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 2dbb7f5193af6..162a7e4995467 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -288,8 +288,8 @@ steps: env: BWC_VERSION: 8.15.4 - - label: "{{matrix.image}} / 8.16.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.1 + - label: "{{matrix.image}} / 8.16.2 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.2 timeout_in_minutes: 300 matrix: setup: @@ -302,7 +302,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.16.1 + BWC_VERSION: 8.16.2 - label: "{{matrix.image}} / 8.17.0 / 
packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.0 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 047e4a3f4f8f6..aa1db893df8cc 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -306,8 +306,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.16.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.1#bwcTest + - label: 8.16.2 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.2#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -316,7 +316,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.16.1 + BWC_VERSION: 8.16.2 retry: automatic: - exit_status: "-1" @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.1", "8.17.0", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.2", "8.17.0", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.ci/bwcVersions b/.ci/bwcVersions index ac07e14c2a176..a8d6dda4fb0c2 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -15,7 +15,7 @@ BWC_VERSION: - "8.13.4" - "8.14.3" - "8.15.4" - - "8.16.1" + - "8.16.2" - "8.17.0" - "8.18.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 351c605e6e092..5514fc376a285 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - - "8.16.1" + - "8.16.2" - "8.17.0" - "8.18.0" - "9.0.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 40071b19af5d3..7b65547a7d591 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -189,6 +189,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_15_4 = new Version(8_15_04_99); public static final Version V_8_16_0 = new Version(8_16_00_99); public static final Version V_8_16_1 = new Version(8_16_01_99); + public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_17_0 = new Version(8_17_00_99); public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index ba575cc642a81..6191922f13094 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8702003 8.15.4,8702003 8.16.0,8772001 +8.16.1,8772004 diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index c54aea88613f5..f84d69af727ac 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -133,3 +133,4 @@ 8.15.3,8512000 8.15.4,8512000 8.16.0,8518000 +8.16.1,8518000 From 8fe8d22f7c63d574d7570abcf21757926468b415 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Thu, 21 Nov 2024 
14:02:18 -0800
Subject: [PATCH 18/50] [DOCS] Remove broken migration guide link (#117293)

---
 docs/reference/cluster/update-settings.asciidoc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/reference/cluster/update-settings.asciidoc b/docs/reference/cluster/update-settings.asciidoc
index ca3d100e31e06..3d8bdcca07e2b 100644
--- a/docs/reference/cluster/update-settings.asciidoc
+++ b/docs/reference/cluster/update-settings.asciidoc
@@ -59,8 +59,8 @@ An example of a transient update:
 ====
 We no longer recommend using transient cluster settings. Use persistent cluster
 settings instead. If a cluster becomes unstable, transient settings can clear
-unexpectedly, resulting in a potentially undesired cluster configuration. See
-the <>.
+unexpectedly, resulting in a potentially undesired cluster configuration.
+// See the <>.
 ====
 // end::transient-settings-warning[]

From 7768133f44421e13c40922dbf23ab47b8afdf46c Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Fri, 22 Nov 2024 09:31:02 +1100
Subject: [PATCH 19/50] Mute org.elasticsearch.xpack.test.rest.XPackRestIT
 test {p0=snapshot/10_basic/Create a source only snapshot and then restore it}
 #117295

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index d1e1976262f55..01cc6d0355a59 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -250,6 +250,9 @@ tests:
 - class: org.elasticsearch.discovery.ClusterDisruptionIT
   method: testAckedIndexing
   issue: https://github.com/elastic/elasticsearch/issues/117024
+- class: org.elasticsearch.xpack.test.rest.XPackRestIT
+  method: test {p0=snapshot/10_basic/Create a source only snapshot and then restore it}
+  issue: https://github.com/elastic/elasticsearch/issues/117295

 # Examples:
 #

From bead24880b9d6e8099c7c9a4043f5cee448ed7db Mon Sep 17 00:00:00 2001
From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com>
Date: Fri, 22 Nov 2024 09:39:10 +1100
Subject: [PATCH 20/50] Mute
 org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests
 testRetryPointInTime #117116

---
 muted-tests.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/muted-tests.yml b/muted-tests.yml
index 01cc6d0355a59..fae848c600aea 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -253,6 +253,9 @@ tests:
 - class: org.elasticsearch.xpack.test.rest.XPackRestIT
   method: test {p0=snapshot/10_basic/Create a source only snapshot and then restore it}
   issue: https://github.com/elastic/elasticsearch/issues/117295
+- class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests
+  method: testRetryPointInTime
+  issue: https://github.com/elastic/elasticsearch/issues/117116

 # Examples:
 #

From de73397a05a982212b9f9bad0f5bea3c3bedabb0 Mon Sep 17 00:00:00 2001
From: Tim Brooks
Date: Thu, 21 Nov 2024 17:11:38 -0700
Subject: [PATCH 21/50] Add configurable timeout safe await method (#117296)

Add a method for a configurable timeout with countdown latches.
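As a rough usage sketch (the test class name and the five-second timeout below are invented
for illustration, not part of the change), a test that wants a tighter bound than the default
SAFE_AWAIT_TIMEOUT could call the new overload like this:

    import java.util.concurrent.CountDownLatch;

    import org.elasticsearch.core.TimeValue;
    import org.elasticsearch.test.ESTestCase;

    public class SafeAwaitTimeoutExampleTests extends ESTestCase {
        public void testBackgroundTaskSignalsWithinTimeout() {
            CountDownLatch latch = new CountDownLatch(1);
            // start some background work that signals completion through the latch
            new Thread(latch::countDown).start();
            // asserts the latch reaches zero within the supplied timeout rather than SAFE_AWAIT_TIMEOUT
            safeAwait(latch, TimeValue.timeValueSeconds(5));
        }
    }

As with the existing single-argument variant, the helper preserves the thread's interrupt
status if the wait is interrupted.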
--- .../main/java/org/elasticsearch/test/ESTestCase.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index d98b51adce615..5b2beaee00bfe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -2331,10 +2331,18 @@ public static void safeAwait(CyclicBarrier barrier) { * flag and asserting that the latch is indeed completed before the timeout. */ public static void safeAwait(CountDownLatch countDownLatch) { + safeAwait(countDownLatch, SAFE_AWAIT_TIMEOUT); + } + + /** + * Await on the given {@link CountDownLatch} with a supplied timeout, preserving the thread's interrupt status + * flag and asserting that the latch is indeed completed before the timeout. + */ + public static void safeAwait(CountDownLatch countDownLatch, TimeValue timeout) { try { assertTrue( "safeAwait: CountDownLatch did not reach zero within the timeout", - countDownLatch.await(SAFE_AWAIT_TIMEOUT.millis(), TimeUnit.MILLISECONDS) + countDownLatch.await(timeout.millis(), TimeUnit.MILLISECONDS) ); } catch (InterruptedException e) { Thread.currentThread().interrupt(); From eff0c42de91f0b8483c06035b10949632869b17d Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Fri, 22 Nov 2024 12:53:50 +1100 Subject: [PATCH 22/50] Fix and unmute OperatorPrivilegesIT (#117218) This updates the constants for `OperatorPrivilegesIT` to include the registered actions that were missing and unmutes the test --- muted-tests.yml | 3 --- .../org/elasticsearch/xpack/security/operator/Constants.java | 4 ++++ 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index fae848c600aea..b88bff86a0fbe 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -162,9 +162,6 @@ tests: - class: org.elasticsearch.xpack.deprecation.DeprecationHttpIT method: testDeprecatedSettingsReturnWarnings issue: https://github.com/elastic/elasticsearch/issues/108628 -- class: org.elasticsearch.xpack.security.operator.OperatorPrivilegesIT - method: testEveryActionIsEitherOperatorOnlyOrNonOperator - issue: https://github.com/elastic/elasticsearch/issues/102992 - class: org.elasticsearch.action.search.SearchQueryThenFetchAsyncActionTests method: testBottomFieldSort issue: https://github.com/elastic/elasticsearch/issues/116249 diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index df97c489cc6b7..bfff63442281d 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -358,6 +358,7 @@ public class Constants { "cluster:monitor/nodes/data_tier_usage", "cluster:monitor/nodes/features", "cluster:monitor/nodes/hot_threads", + "cluster:monitor/nodes/index_mode_stats", "cluster:monitor/nodes/info", "cluster:monitor/nodes/stats", "cluster:monitor/nodes/usage", @@ -399,6 +400,7 @@ public class Constants { "cluster:monitor/xpack/info/frozen_indices", "cluster:monitor/xpack/info/graph", "cluster:monitor/xpack/info/ilm", + 
"cluster:monitor/xpack/info/logsdb", "cluster:monitor/xpack/info/logstash", "cluster:monitor/xpack/info/ml", "cluster:monitor/xpack/info/monitoring", @@ -463,6 +465,7 @@ public class Constants { "cluster:monitor/xpack/usage/health_api", "cluster:monitor/xpack/usage/ilm", "cluster:monitor/xpack/usage/inference", + "cluster:monitor/xpack/usage/logsdb", "cluster:monitor/xpack/usage/logstash", "cluster:monitor/xpack/usage/ml", "cluster:monitor/xpack/usage/monitoring", @@ -488,6 +491,7 @@ public class Constants { "indices:admin/block/add[s]", "indices:admin/cache/clear", "indices:admin/data_stream/lazy_rollover", + "indices:admin/data_stream/reindex", "indices:internal/admin/ccr/restore/file_chunk/get", "indices:internal/admin/ccr/restore/session/clear", "indices:internal/admin/ccr/restore/session/put", From d87a1a2b5f9a16a1bda030980ef831462bf3a686 Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Fri, 22 Nov 2024 13:33:06 +1100 Subject: [PATCH 23/50] Restore repo_name label to repository metrics (#117114) --- .../AzureBlobStoreRepositoryMetricsTests.java | 24 +++++++++++++------ .../azure/AzureBlobStoreRepositoryTests.java | 5 +++- .../s3/S3BlobStoreRepositoryTests.java | 5 +++- .../s3/S3RetryingInputStream.java | 2 ++ .../s3/S3BlobContainerRetriesTests.java | 2 +- .../repositories/RepositoriesMetrics.java | 11 ++++++++- 6 files changed, 38 insertions(+), 11 deletions(-) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java index e049d4cd372e6..61940be247861 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java @@ -112,7 +112,7 @@ public void testThrottleResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numThrottles + 1) .withThrottles(numThrottles) .withExceptions(numThrottles) @@ -137,7 +137,7 @@ public void testRangeNotSatisfiedAreCountedInMetrics() throws IOException { assertThrows(RequestedRangeNotSatisfiedException.class, () -> blobContainer.readBlob(purpose, blobName)); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB, repository).expectMetrics() .withRequests(1) .withThrottles(0) .withExceptions(1) @@ -170,7 +170,7 @@ public void testErrorResponsesAreCountedInMetrics() throws IOException { blobContainer.blobExists(purpose, blobName); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.GET_BLOB_PROPERTIES, repository).expectMetrics() .withRequests(numErrors + 1) .withThrottles(throttles.get()) .withExceptions(numErrors) @@ -191,7 +191,7 @@ public void testRequestFailuresAreCountedInMetrics() { assertThrows(IOException.class, () -> 
blobContainer.listBlobs(purpose)); // Correct metrics are recorded - metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS).expectMetrics() + metricsAsserter(dataNodeName, purpose, AzureBlobStore.Operation.LIST_BLOBS, repository).expectMetrics() .withRequests(4) .withThrottles(0) .withExceptions(4) @@ -322,14 +322,20 @@ private void clearMetrics(String discoveryNode) { .forEach(TestTelemetryPlugin::resetMeter); } - private MetricsAsserter metricsAsserter(String dataNodeName, OperationPurpose operationPurpose, AzureBlobStore.Operation operation) { - return new MetricsAsserter(dataNodeName, operationPurpose, operation); + private MetricsAsserter metricsAsserter( + String dataNodeName, + OperationPurpose operationPurpose, + AzureBlobStore.Operation operation, + String repository + ) { + return new MetricsAsserter(dataNodeName, operationPurpose, operation, repository); } private class MetricsAsserter { private final String dataNodeName; private final OperationPurpose purpose; private final AzureBlobStore.Operation operation; + private final String repository; enum Result { Success, @@ -355,10 +361,11 @@ List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, Strin abstract List getMeasurements(TestTelemetryPlugin testTelemetryPlugin, String name); } - private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation) { + private MetricsAsserter(String dataNodeName, OperationPurpose purpose, AzureBlobStore.Operation operation, String repository) { this.dataNodeName = dataNodeName; this.purpose = purpose; this.operation = operation; + this.repository = repository; } private class Expectations { @@ -451,6 +458,7 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa .filter( m -> m.attributes().get("operation").equals(operation.getKey()) && m.attributes().get("purpose").equals(purpose.getKey()) + && m.attributes().get("repo_name").equals(repository) && m.attributes().get("repo_type").equals("azure") ) .findFirst() @@ -462,6 +470,8 @@ private void assertMatchingMetricRecorded(MetricType metricType, String metricNa + operation.getKey() + " and purpose=" + purpose.getKey() + + " and repo_name=" + + repository + " in " + measurements ) diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java index ab3f3ee4f3728..bd21f208faac4 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryTests.java @@ -402,7 +402,10 @@ public void testMetrics() throws Exception { ) ); metrics.forEach(metric -> { - assertThat(metric.attributes(), allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("operation"), hasKey("purpose"))); + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", AzureRepository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); final AzureBlobStore.Operation operation = AzureBlobStore.Operation.fromKey((String) metric.attributes().get("operation")); final AzureBlobStore.StatsKey statsKey = new AzureBlobStore.StatsKey( operation, diff --git a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java 
b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index d9480abf21687..bb8a452e21771 100644 --- a/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/modules/repository-s3/src/internalClusterTest/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -300,7 +300,10 @@ public void testMetrics() throws Exception { ) ); metrics.forEach(metric -> { - assertThat(metric.attributes(), allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("operation"), hasKey("purpose"))); + assertThat( + metric.attributes(), + allOf(hasEntry("repo_type", S3Repository.TYPE), hasKey("repo_name"), hasKey("operation"), hasKey("purpose")) + ); final S3BlobStore.Operation operation = S3BlobStore.Operation.parse((String) metric.attributes().get("operation")); final S3BlobStore.StatsKey statsKey = new S3BlobStore.StatsKey( operation, diff --git a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java index 7407522651e55..da357dc09ab95 100644 --- a/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java +++ b/modules/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RetryingInputStream.java @@ -327,6 +327,8 @@ private Map metricAttributes(String action) { return Map.of( "repo_type", S3Repository.TYPE, + "repo_name", + blobStore.getRepositoryMetadata().name(), "operation", Operation.GET_OBJECT.getKey(), "purpose", diff --git a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java index ac49cffc1e0da..b292dc5872994 100644 --- a/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java +++ b/modules/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobContainerRetriesTests.java @@ -1106,7 +1106,7 @@ private List getRetryHistogramMeasurements() { } private Map metricAttributes(String action) { - return Map.of("repo_type", "s3", "operation", "GetObject", "purpose", "Indices", "action", action); + return Map.of("repo_type", "s3", "repo_name", "repository", "operation", "GetObject", "purpose", "Indices", "action", action); } /** diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java index 3a210199065b7..2cd6e2b11ef7a 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesMetrics.java @@ -127,7 +127,16 @@ public static Map createAttributesMap( OperationPurpose purpose, String operation ) { - return Map.of("repo_type", repositoryMetadata.type(), "operation", operation, "purpose", purpose.getKey()); + return Map.of( + "repo_type", + repositoryMetadata.type(), + "repo_name", + repositoryMetadata.name(), + "operation", + operation, + "purpose", + purpose.getKey() + ); } } From 4793caa9af8b7eb90ad4b62e8d638be987c7279f Mon Sep 17 00:00:00 2001 From: Nick Tindall Date: Fri, 22 Nov 2024 14:45:04 +1100 Subject: [PATCH 24/50] Expand LineLength exemption to include all comments with a URL (#117221) --- build-tools-internal/src/main/resources/checkstyle.xml | 2 +- 1 
file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/resources/checkstyle.xml b/build-tools-internal/src/main/resources/checkstyle.xml index daedc2ac3c629..9ed31d993909e 100644 --- a/build-tools-internal/src/main/resources/checkstyle.xml +++ b/build-tools-internal/src/main/resources/checkstyle.xml @@ -57,7 +57,7 @@ unfair. --> - + From 8cfe8f1c5ccbe00422609c36819a58115caad922 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Fri, 22 Nov 2024 07:13:44 +0100 Subject: [PATCH 25/50] MultiBucketsAggregation.Bucket does not implement ToXContent anymore (#117240) This change makes some buckets implementation leaner. --- .../adjacency/InternalAdjacencyMatrix.java | 6 +-- .../histogram/InternalAutoDateHistogram.java | 6 +-- .../bucket/timeseries/InternalTimeSeries.java | 22 ++++------- .../timeseries/TimeSeriesAggregator.java | 3 +- .../timeseries/InternalTimeSeriesTests.java | 20 +++++----- .../timeseries/TimeSeriesAggregatorTests.java | 6 +-- .../bucket/MultiBucketsAggregation.java | 3 +- .../bucket/composite/InternalComposite.java | 8 ---- .../bucket/filter/FiltersAggregator.java | 14 ++----- .../bucket/filter/InternalFilters.java | 30 +++++--------- .../bucket/geogrid/InternalGeoGrid.java | 2 +- .../bucket/geogrid/InternalGeoGridBucket.java | 5 +-- .../AbstractHistogramAggregator.java | 2 +- .../histogram/DateHistogramAggregator.java | 3 +- .../DateRangeHistogramAggregator.java | 1 - .../histogram/InternalDateHistogram.java | 27 +++++-------- .../bucket/histogram/InternalHistogram.java | 27 +++++-------- .../InternalVariableWidthHistogram.java | 6 +-- .../bucket/prefix/InternalIpPrefix.java | 38 +++--------------- .../bucket/prefix/IpPrefixAggregator.java | 2 - .../bucket/range/BinaryRangeAggregator.java | 4 +- .../bucket/range/InternalBinaryRange.java | 29 ++++---------- .../bucket/range/InternalDateRange.java | 18 ++------- .../bucket/range/InternalGeoDistance.java | 10 ++--- .../bucket/range/InternalRange.java | 39 ++++--------------- .../bucket/range/RangeAggregator.java | 15 ++----- .../bucket/terms/AbstractInternalTerms.java | 5 ++- .../terms/InternalMappedSignificantTerms.java | 2 +- .../bucket/terms/InternalMappedTerms.java | 2 +- .../bucket/terms/InternalRareTerms.java | 6 +-- .../terms/InternalSignificantTerms.java | 4 +- .../bucket/terms/InternalTerms.java | 3 +- .../bucket/terms/UnmappedTerms.java | 2 +- .../search/SearchResponseMergerTests.java | 11 +----- .../InternalAggregationsTests.java | 32 ++++++--------- .../bucket/filter/InternalFiltersTests.java | 7 ++-- .../histogram/InternalDateHistogramTests.java | 10 +---- .../histogram/InternalHistogramTests.java | 12 ++---- .../bucket/prefix/InternalIpPrefixTests.java | 16 +------- .../range/InternalBinaryRangeTests.java | 3 +- .../bucket/range/InternalDateRangeTests.java | 6 +-- .../range/InternalGeoDistanceTests.java | 6 +-- .../bucket/range/InternalRangeTests.java | 6 +-- .../pipeline/BucketHelpersTests.java | 10 ----- .../multiterms/InternalMultiTerms.java | 7 ++-- .../InternalCategorizationAggregation.java | 6 +-- .../aggs/changepoint/ChangePointBucket.java | 3 +- .../aggregation/AggregationTestUtils.java | 2 +- .../rollup/RollupResponseTranslator.java | 10 +---- .../xpack/sql/execution/search/Querier.java | 6 --- .../search/extractor/TestBucket.java | 7 ---- .../pivot/AggregationResultUtilsTests.java | 16 ++++---- 52 files changed, 157 insertions(+), 389 deletions(-) diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java 
b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java index 6f36f1f17bf8b..824f009bc7d8e 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -81,14 +81,12 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), key); builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -237,7 +235,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java index 42aa79f990fc6..edb7ec4cffce7 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -99,8 +99,7 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, DocValueFormat format) throws IOException { String keyAsString = format.format(key).toString(); builder.startObject(); if (format != DocValueFormat.RAW) { @@ -110,7 +109,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -597,7 +595,7 @@ private BucketReduceResult mergeConsecutiveBuckets( public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, format); } builder.endArray(); builder.field("interval", getInterval().toString()); diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java index c4cdacd135cb4..d7590f2126325 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeries.java @@ -36,24 +36,21 @@ public class 
InternalTimeSeries extends InternalMultiBucketAggregation buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { - buckets.add(new InternalTimeSeries.InternalBucket(in, keyed)); + buckets.add(new InternalTimeSeries.InternalBucket(in)); } this.buckets = buckets; this.bucketMap = null; @@ -162,7 +156,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -252,14 +246,14 @@ public InternalTimeSeries create(List buckets) { @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed); + return new InternalBucket(prototype.key, prototype.docCount, aggregations); } private InternalBucket reduceBucket(List buckets, AggregationReduceContext context) { InternalTimeSeries.InternalBucket reduced = null; for (InternalTimeSeries.InternalBucket bucket : buckets) { if (reduced == null) { - reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations, bucket.keyed); + reduced = new InternalTimeSeries.InternalBucket(bucket.key, bucket.docCount, bucket.aggregations); } else { reduced.docCount += bucket.docCount; } diff --git a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java index 369ae4590fe97..63472bca1d9ac 100644 --- a/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java +++ b/modules/aggregations/src/main/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregator.java @@ -83,8 +83,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw InternalTimeSeries.InternalBucket bucket = new InternalTimeSeries.InternalBucket( BytesRef.deepCopyOf(spare), // Closing bucketOrds will corrupt the bytes ref, so need to make a deep copy here. 
docCount, - null, - keyed + null ); bucket.bucketOrd = ordsEnum.ord(); buckets.add(bucket); diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java index e61c02e0b9cd2..3b67d09c0d6a1 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/InternalTimeSeriesTests.java @@ -49,7 +49,7 @@ private List randomBuckets(boolean keyed, InternalAggregations a } try { var key = TimeSeriesIdFieldMapper.buildLegacyTsid(routingPathFields).toBytesRef(); - bucketList.add(new InternalBucket(key, docCount, aggregations, keyed)); + bucketList.add(new InternalBucket(key, docCount, aggregations)); } catch (IOException e) { throw new UncheckedIOException(e); } @@ -108,10 +108,10 @@ public void testReduceSimple() { InternalTimeSeries first = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("1"), 3, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("10"), 6, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("2"), 2, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("9"), 5, InternalAggregations.EMPTY) ), false, Map.of() @@ -119,8 +119,8 @@ public void testReduceSimple() { InternalTimeSeries second = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("2"), 1, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("3"), 3, InternalAggregations.EMPTY) ), false, Map.of() @@ -128,9 +128,9 @@ public void testReduceSimple() { InternalTimeSeries third = new InternalTimeSeries( "ts", List.of( - new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY, false), - new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY, false) + new InternalBucket(new BytesRef("1"), 2, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("3"), 4, InternalAggregations.EMPTY), + new InternalBucket(new BytesRef("9"), 4, InternalAggregations.EMPTY) ), false, Map.of() diff --git a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java index d9a4023457126..493b4bdc81860 100644 --- a/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java +++ b/modules/aggregations/src/test/java/org/elasticsearch/aggregations/bucket/timeseries/TimeSeriesAggregatorTests.java @@ -176,19 +176,19 @@ public void testMultiBucketAggregationAsSubAggregation() throws IOException { InternalDateHistogram byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=xxx}").getAggregations().get("by_timestamp"); assertThat( 
byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY)) ); assertThat(ts.getBucketByKey("{dim1=aaa, dim2=yyy}").docCount, equalTo(2L)); byTimeStampBucket = ts.getBucketByKey("{dim1=aaa, dim2=yyy}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 2, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 2, null, InternalAggregations.EMPTY)) ); assertThat(ts.getBucketByKey("{dim1=bbb, dim2=zzz}").docCount, equalTo(4L)); byTimeStampBucket = ts.getBucketByKey("{dim1=bbb, dim2=zzz}").getAggregations().get("by_timestamp"); assertThat( byTimeStampBucket.getBuckets(), - contains(new InternalDateHistogram.Bucket(startTime, 4, false, null, InternalAggregations.EMPTY)) + contains(new InternalDateHistogram.Bucket(startTime, 4, null, InternalAggregations.EMPTY)) ); }; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java index 87ebec525a6fa..d39e90b44579e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MultiBucketsAggregation.java @@ -12,7 +12,6 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.HasAggregations; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.xcontent.ToXContent; import java.util.List; @@ -24,7 +23,7 @@ public interface MultiBucketsAggregation extends Aggregation { * A bucket represents a criteria to which all documents that fall in it adhere to. It is also uniquely identified * by a key, and can potentially hold sub-aggregations computed over all documents in it. 
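 * Since this interface no longer extends {@code ToXContent}, a bucket does not render itself;
 * each concrete multi-bucket aggregation now serializes its own buckets (typically via a private
 * bucketToXContent helper), passing display state such as the keyed flag explicitly instead of
 * storing it on every bucket.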
*/ - interface Bucket extends HasAggregations, ToXContent { + interface Bucket extends HasAggregations { /** * @return The key associated with the bucket */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index 30c45ba46d9b7..8b3253418bc23 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -465,14 +465,6 @@ public int compareKey(InternalBucket other) { return 0; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - /** - * See {@link CompositeAggregation#bucketToXContent} - */ - throw new UnsupportedOperationException("not implemented"); - } - InternalBucket finalizeSampling(SamplingContext samplingContext) { return new InternalBucket( sourceNames, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index 69eff3630a8f4..a9ec0ba878ec0 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -215,15 +215,9 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw filters.size() + (otherBucketKey == null ? 0 : 1), (offsetInOwningOrd, docCount, subAggregationResults) -> { if (offsetInOwningOrd < filters.size()) { - return new InternalFilters.InternalBucket( - filters.get(offsetInOwningOrd).key(), - docCount, - subAggregationResults, - keyed, - keyedBucket - ); + return new InternalFilters.InternalBucket(filters.get(offsetInOwningOrd).key(), docCount, subAggregationResults); } - return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults, keyed, keyedBucket); + return new InternalFilters.InternalBucket(otherBucketKey, docCount, subAggregationResults); }, buckets -> new InternalFilters(name, buckets, keyed, keyedBucket, metadata()) ); @@ -234,12 +228,12 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(filters.size() + (otherBucketKey == null ? 
0 : 1)); for (QueryToFilterAdapter filter : filters) { - InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs, keyed, keyedBucket); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(filter.key(), 0, subAggs); buckets.add(bucket); } if (otherBucketKey != null) { - InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs, keyed, keyedBucket); + InternalFilters.InternalBucket bucket = new InternalFilters.InternalBucket(otherBucketKey, 0, subAggs); buckets.add(bucket); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index a5dfb0d8efafa..c05759582346a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -32,26 +32,20 @@ public class InternalFilters extends InternalMultiBucketAggregation implements Filters { public static class InternalBucket extends InternalMultiBucketAggregation.InternalBucket implements Filters.Bucket { - private final boolean keyed; - private final boolean keyedBucket; private final String key; private long docCount; InternalAggregations aggregations; - public InternalBucket(String key, long docCount, InternalAggregations aggregations, boolean keyed, boolean keyedBucket) { + public InternalBucket(String key, long docCount, InternalAggregations aggregations) { this.key = key; - this.keyedBucket = keyedBucket; this.docCount = docCount; this.aggregations = aggregations; - this.keyed = keyed; } /** * Read from a stream. 
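 * (The keyed and keyedBucket rendering flags are no longer read here; they are kept on the
 * enclosing InternalFilters and passed in when the bucket is rendered as XContent.)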
*/ - public InternalBucket(StreamInput in, boolean keyed, boolean keyedBucket) throws IOException { - this.keyed = keyed; - this.keyedBucket = keyedBucket; + public InternalBucket(StreamInput in) throws IOException { key = in.readOptionalString(); docCount = in.readVLong(); aggregations = InternalAggregations.readFrom(in); @@ -84,8 +78,7 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed, boolean keyedBucket) throws IOException { if (keyed && keyedBucket) { builder.startObject(key); } else { @@ -97,7 +90,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -110,24 +102,20 @@ public boolean equals(Object other) { } InternalBucket that = (InternalBucket) other; return Objects.equals(key, that.key) - && Objects.equals(keyed, that.keyed) - && Objects.equals(keyedBucket, that.keyedBucket) && Objects.equals(docCount, that.docCount) && Objects.equals(aggregations, that.aggregations); } @Override public int hashCode() { - return Objects.hash(getClass(), key, keyed, keyedBucket, docCount, aggregations); + return Objects.hash(getClass(), key, docCount, aggregations); } InternalBucket finalizeSampling(SamplingContext samplingContext) { return new InternalBucket( key, samplingContext.scaleUp(docCount), - InternalAggregations.finalizeSampling(aggregations, samplingContext), - keyed, - keyedBucket + InternalAggregations.finalizeSampling(aggregations, samplingContext) ); } } @@ -155,7 +143,7 @@ public InternalFilters(StreamInput in) throws IOException { int size = in.readVInt(); List buckets = new ArrayList<>(size); for (int i = 0; i < size; i++) { - buckets.add(new InternalBucket(in, keyed, keyedBucket)); + buckets.add(new InternalBucket(in)); } this.buckets = buckets; this.bucketMap = null; @@ -182,7 +170,7 @@ public InternalFilters create(List buckets) { @Override public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) { - return new InternalBucket(prototype.key, prototype.docCount, aggregations, prototype.keyed, keyedBucket); + return new InternalBucket(prototype.key, prototype.docCount, aggregations); } @Override @@ -211,7 +199,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont ) { @Override protected InternalBucket createBucket(InternalBucket proto, long docCount, InternalAggregations aggregations) { - return new InternalBucket(proto.key, docCount, aggregations, proto.keyed, proto.keyedBucket); + return new InternalBucket(proto.key, docCount, aggregations); } }; @@ -252,7 +240,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed, keyedBucket); } if (keyed && keyedBucket) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java index d56625ab28c51..6a32b41034503 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGrid.java @@ -152,7 +152,7 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (InternalGeoGridBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java index c972845468c2b..9e3c96da2e70b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -76,14 +77,12 @@ public int compareTo(InternalGeoGridBucket other) { return 0; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + final void bucketToXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); builder.field(Aggregation.CommonFields.KEY.getPreferredName(), getKeyAsString()); builder.field(Aggregation.CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java index ed687df6377dd..5ea8cd035e580 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AbstractHistogramAggregator.java @@ -84,7 +84,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { double roundKey = Double.longBitsToDouble(bucketValue); double key = roundKey * interval + offset; - return new InternalHistogram.Bucket(key, docCount, keyed, formatter, subAggregationResults); + return new InternalHistogram.Bucket(key, docCount, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index cc2db63fa5ec5..1eb0226ad8c8c 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -340,7 +340,7 @@ private void addRoundedValue(long rounded, int doc, long owningBucketOrd, LeafBu @Override public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throws IOException { return buildAggregationsForVariableBuckets(owningBucketOrds, bucketOrds, (bucketValue, docCount, subAggregationResults) -> { - return new InternalDateHistogram.Bucket(bucketValue, docCount, keyed, formatter, subAggregationResults); + return new InternalDateHistogram.Bucket(bucketValue, docCount, formatter, subAggregationResults); }, (owningBucketOrd, buckets) -> { // the contract of the histogram aggregation is that shards must return buckets ordered by key in ascending order CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator()); @@ -466,7 +466,6 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) { new InternalDateHistogram.Bucket( rangeBucket.getFrom().toInstant().toEpochMilli(), rangeBucket.getDocCount(), - keyed, format, rangeBucket.getAggregations() ) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java index f385f7c34f6b7..5a104055d9aec 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateRangeHistogramAggregator.java @@ -171,7 +171,6 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw (bucketValue, docCount, subAggregationResults) -> new InternalDateHistogram.Bucket( bucketValue, docCount, - keyed, formatter, subAggregationResults ), diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 564abff2a9f97..d2badbeec4622 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -53,19 +53,17 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< public static class Bucket extends AbstractHistogramBucket implements KeyComparable { final long key; - private final transient boolean keyed; - public Bucket(long key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { + public Bucket(long key, long docCount, DocValueFormat format, InternalAggregations aggregations) { super(docCount, aggregations, format); - this.keyed = keyed; this.key = key; } /** * Read from a stream. 
*/ - public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - return new Bucket(in.readLong(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + return new Bucket(in.readLong(), in.readVLong(), format, InternalAggregations.readFrom(in)); } @Override @@ -101,8 +99,7 @@ public Object getKey() { return Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String keyAsString = format.format(key).toString(); if (keyed) { builder.startObject(keyAsString); @@ -116,7 +113,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -124,15 +120,10 @@ public int compareKey(Bucket other) { return Long.compare(key, other.key); } - public boolean getKeyed() { - return keyed; - } - Bucket finalizeSampling(SamplingContext samplingContext) { return new Bucket( key, samplingContext.scaleUp(docCount), - keyed, format, InternalAggregations.finalizeSampling(aggregations, samplingContext) ); @@ -237,7 +228,7 @@ public InternalDateHistogram(StreamInput in) throws IOException { } else { downsampledResultsOffset = false; } - buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) { // list is mutable by #readCollectionAsList contract @@ -301,7 +292,7 @@ public InternalDateHistogram create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } private List reduceBuckets(final PriorityQueue> pq, AggregationReduceContext reduceContext) { @@ -398,7 +389,7 @@ public void accept(long key) { reduceContext.consumeBucketsAndMaybeBreak(size); size = 0; } - iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); + iter.add(new InternalDateHistogram.Bucket(key, 0, format, reducedEmptySubAggs)); } }); } @@ -546,7 +537,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -603,7 +594,7 @@ public InternalAggregation createAggregation(List { final double key; - private final transient boolean keyed; - public Bucket(double key, long docCount, boolean keyed, DocValueFormat format, InternalAggregations aggregations) { + public Bucket(double key, long docCount, DocValueFormat format, InternalAggregations aggregations) { super(docCount, aggregations, format); - this.keyed = keyed; this.key = key; } /** * Read from a stream. 
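// Wire-format note (a sketch of the invariant, not code from this hunk): dropping the
// constructor argument is transport-safe because `keyed` was never serialized per bucket;
// the old field was marked transient and readFrom never consumed it from the stream.
// The parent aggregation still writes and reads the flag exactly once:
//   out.writeBoolean(keyed);                                                      // once per aggregation
//   buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); // no flag per bucket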
*/ - public static Bucket readFrom(StreamInput in, boolean keyed, DocValueFormat format) throws IOException { - return new Bucket(in.readDouble(), in.readVLong(), keyed, format, InternalAggregations.readFrom(in)); + public static Bucket readFrom(StreamInput in, DocValueFormat format) throws IOException { + return new Bucket(in.readDouble(), in.readVLong(), format, InternalAggregations.readFrom(in)); } @Override @@ -96,8 +94,7 @@ public Object getKey() { return key; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String keyAsString = format.format(key).toString(); if (keyed) { builder.startObject(keyAsString); @@ -111,7 +108,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } @Override @@ -119,15 +115,10 @@ public int compareKey(Bucket other) { return Double.compare(key, other.key); } - public boolean getKeyed() { - return keyed; - } - Bucket finalizeSampling(SamplingContext samplingContext) { return new Bucket( key, samplingContext.scaleUp(docCount), - keyed, format, InternalAggregations.finalizeSampling(aggregations, samplingContext) ); @@ -220,7 +211,7 @@ public InternalHistogram(StreamInput in) throws IOException { } format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); - buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, keyed, format)); + buckets = in.readCollectionAsList(stream -> Bucket.readFrom(stream, format)); // we changed the order format in 8.13 for partial reduce, therefore we need to order them to perform merge sort if (in.getTransportVersion().between(TransportVersions.V_8_13_0, TransportVersions.V_8_14_0)) { // list is mutable by #readCollectionAsList contract @@ -265,7 +256,7 @@ public InternalHistogram create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(prototype.key, prototype.docCount, prototype.keyed, prototype.format, aggregations); + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); } private List reduceBuckets(PriorityQueue> pq, AggregationReduceContext reduceContext) { @@ -373,7 +364,7 @@ public void accept(double key) { reduceContext.consumeBucketsAndMaybeBreak(size); size = 0; } - iter.add(new Bucket(key, 0, keyed, format, reducedEmptySubAggs)); + iter.add(new Bucket(key, 0, format, reducedEmptySubAggs)); } }); } @@ -478,7 +469,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -508,7 +499,7 @@ public InternalAggregation createAggregation(List { - private final transient DocValueFormat format; private final BytesRef key; - private final boolean keyed; private final boolean isIpv6; private final int prefixLength; private final boolean appendPrefixLength; @@ -48,18 +46,14 @@ public static class Bucket extends InternalMultiBucketAggregation.InternalBucket private final InternalAggregations aggregations; public Bucket( - DocValueFormat format, BytesRef key, - boolean keyed, boolean isIpv6, int 
prefixLength, boolean appendPrefixLength, long docCount, InternalAggregations aggregations ) { - this.format = format; this.key = key; - this.keyed = keyed; this.isIpv6 = isIpv6; this.prefixLength = prefixLength; this.appendPrefixLength = appendPrefixLength; @@ -70,9 +64,7 @@ public Bucket( /** * Read from a stream. */ - public Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOException { - this.format = format; - this.keyed = keyed; + public Bucket(StreamInput in) throws IOException { this.key = in.readBytesRef(); this.isIpv6 = in.readBoolean(); this.prefixLength = in.readVInt(); @@ -81,8 +73,7 @@ public Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOExc this.aggregations = InternalAggregations.readFrom(in); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params, boolean keyed) throws IOException { String key = DocValueFormat.IP.format(this.key); if (appendPrefixLength) { key = key + "/" + prefixLength; @@ -101,7 +92,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(IpPrefixAggregationBuilder.PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } private static BytesRef netmask(int prefixLength) { @@ -118,10 +108,6 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - public DocValueFormat getFormat() { - return format; - } - public BytesRef getKey() { return key; } @@ -162,14 +148,13 @@ public boolean equals(Object o) { && prefixLength == bucket.prefixLength && appendPrefixLength == bucket.appendPrefixLength && docCount == bucket.docCount - && Objects.equals(format, bucket.format) && Objects.equals(key, bucket.key) && Objects.equals(aggregations, bucket.aggregations); } @Override public int hashCode() { - return Objects.hash(format, key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations); + return Objects.hash(key, isIpv6, prefixLength, appendPrefixLength, docCount, aggregations); } @Override @@ -206,7 +191,7 @@ public InternalIpPrefix(StreamInput in) throws IOException { format = in.readNamedWriteable(DocValueFormat.class); keyed = in.readBoolean(); minDocCount = in.readVLong(); - buckets = in.readCollectionAsList(stream -> new Bucket(stream, format, keyed)); + buckets = in.readCollectionAsList(Bucket::new); } @Override @@ -298,7 +283,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (InternalIpPrefix.Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); @@ -316,9 +301,7 @@ public InternalIpPrefix create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { return new Bucket( - format, prototype.key, - prototype.keyed, prototype.isIpv6, prototype.prefixLength, prototype.appendPrefixLength, @@ -328,16 +311,7 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) } private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, long docCount) { - return new Bucket( - format, - prototype.key, - prototype.keyed, - prototype.isIpv6, - prototype.prefixLength, - prototype.appendPrefixLength, - docCount, - aggregations - ); + return new 
Bucket(prototype.key, prototype.isIpv6, prototype.prefixLength, prototype.appendPrefixLength, docCount, aggregations); } private Bucket reduceBucket(List buckets, AggregationReduceContext context) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java index e3192e9b2fa16..38d26bfa9ae28 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregator.java @@ -200,9 +200,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw checkRealMemoryCBForInternalBucket(); buckets.add( new InternalIpPrefix.Bucket( - config.format(), BytesRef.deepCopyOf(ipAddress), - keyed, ipPrefix.isIpv6, ipPrefix.prefixLength, ipPrefix.appendPrefixLength, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 9bde8d007c1b7..c10bb3543549e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -366,7 +366,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; - return new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, docCount, subAggregationResults); + return new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, docCount, subAggregationResults); }, buckets -> new InternalBinaryRange(name, format, keyed, buckets, metadata()) ); @@ -378,7 +378,7 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(ranges.length); for (Range range : ranges) { - InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, keyed, range.key, range.from, range.to, 0, subAggs); + InternalBinaryRange.Bucket bucket = new InternalBinaryRange.Bucket(format, range.key, range.from, range.to, 0, subAggs); buckets.add(bucket); } return new InternalBinaryRange(name, format, keyed, buckets, metadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 100bab7443a51..9571dfebc6069 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -39,23 +39,13 @@ public final class InternalBinaryRange extends InternalMultiBucketAggregation Bucket.createFromStream(stream, format, keyed)); + buckets = in.readCollectionAsList(stream -> Bucket.createFromStream(stream, format)); } @Override @@ -235,7 +222,7 @@ public InternalBinaryRange create(List buckets) { @Override public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { - return new Bucket(format, keyed, prototype.key, prototype.from, prototype.to, prototype.docCount, aggregations); + return new Bucket(format, prototype.key, 
prototype.from, prototype.to, prototype.docCount, aggregations); } @Override @@ -251,7 +238,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont @Override protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) { - return new Bucket(proto.format, proto.keyed, proto.key, proto.from, proto.to, docCount, aggregations); + return new Bucket(proto.format, proto.key, proto.from, proto.to, docCount, aggregations); } }; @@ -299,7 +286,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (Bucket range : buckets) { - range.toXContent(builder, params); + range.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java index 7b2858806c325..7291a099dd7f7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRange.java @@ -34,19 +34,11 @@ public Bucket( boolean keyed, DocValueFormat formatter ) { - super(key, from, to, docCount, InternalAggregations.from(aggregations), keyed, formatter); + super(key, from, to, docCount, InternalAggregations.from(aggregations), formatter); } - public Bucket( - String key, - double from, - double to, - long docCount, - InternalAggregations aggregations, - boolean keyed, - DocValueFormat formatter - ) { - super(key, from, to, docCount, aggregations, keyed, formatter); + public Bucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat formatter) { + super(key, from, to, docCount, aggregations, formatter); } @Override @@ -99,10 +91,9 @@ public Bucket createBucket( double to, long docCount, InternalAggregations aggregations, - boolean keyed, DocValueFormat formatter ) { - return new Bucket(key, from, to, docCount, aggregations, keyed, formatter); + return new Bucket(key, from, to, docCount, aggregations, formatter); } @Override @@ -113,7 +104,6 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) prototype.internalGetTo(), prototype.getDocCount(), aggregations, - prototype.getKeyed(), prototype.getFormat() ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java index d1c3761d45e82..9a33df4702c1c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistance.java @@ -23,8 +23,8 @@ public class InternalGeoDistance extends InternalRange ranges, DocValueFormat format, boolean keye } @SuppressWarnings("unchecked") - public B createBucket( - String key, - double from, - double to, - long docCount, - InternalAggregations aggregations, - boolean keyed, - DocValueFormat format - ) { - return (B) new Bucket(key, from, to, docCount, aggregations, keyed, format); + public B createBucket(String key, double from, double to, long docCount, InternalAggregations aggregations, DocValueFormat format) { + return (B) new Bucket(key, from, to, docCount, aggregations, format); } 
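// The same mechanical edit repeats across every range/histogram factory and call site in this
// patch. Reduced to a self-contained toy (hypothetical names, illustration only -- these are
// not Elasticsearch types):

record OldBucket(String key, long docCount, boolean keyed) {} // presentation flag copied into every element

record NewBucket(String key, long docCount) { // element keeps data only
    // The container owns the presentation flag and passes it per render call.
    void render(StringBuilder out, boolean keyed) {
        if (keyed) {
            out.append('"').append(key).append("\": ");
        }
        out.append("{ \"doc_count\": ").append(docCount).append(" }");
    }
}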
@SuppressWarnings("unchecked") @@ -232,7 +209,6 @@ public B createBucket(InternalAggregations aggregations, B prototype) { prototype.to, prototype.getDocCount(), aggregations, - prototype.keyed, prototype.format ); } @@ -285,7 +261,7 @@ public InternalRange(StreamInput in) throws IOException { } long docCount = in.readVLong(); InternalAggregations aggregations = InternalAggregations.readFrom(in); - ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, keyed, format)); + ranges.add(getFactory().createBucket(key, from, to, docCount, aggregations, format)); } this.ranges = ranges; } @@ -335,7 +311,7 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont @Override protected Bucket createBucket(Bucket proto, long docCount, InternalAggregations aggregations) { - return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.keyed, proto.format); + return getFactory().createBucket(proto.key, proto.from, proto.to, docCount, aggregations, proto.format); } }; @@ -371,7 +347,6 @@ public InternalAggregation finalizeSampling(SamplingContext samplingContext) { b.to, samplingContext.scaleUp(b.getDocCount()), InternalAggregations.finalizeSampling(b.getAggregations(), samplingContext), - b.keyed, b.format ) ) @@ -390,7 +365,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th builder.startArray(CommonFields.BUCKETS.getPreferredName()); } for (B range : ranges) { - range.toXContent(builder, params); + range.bucketToXContent(builder, params, keyed); } if (keyed) { builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index 0654a788a10a9..a4574e8081868 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -538,15 +538,7 @@ public InternalAggregation[] buildAggregations(LongArray owningBucketOrds) throw ranges.length, (offsetInOwningOrd, docCount, subAggregationResults) -> { Range range = ranges[offsetInOwningOrd]; - return rangeFactory.createBucket( - range.key, - range.originalFrom, - range.originalTo, - docCount, - subAggregationResults, - keyed, - format - ); + return rangeFactory.createBucket(range.key, range.originalFrom, range.originalTo, docCount, subAggregationResults, format); }, buckets -> rangeFactory.create(name, buckets, format, keyed, metadata()) ); @@ -564,7 +556,6 @@ public InternalAggregation buildEmptyAggregation() { range.originalTo, 0, subAggs, - keyed, format ); buckets.add(bucket); @@ -614,7 +605,7 @@ public InternalAggregation buildEmptyAggregation() { InternalAggregations subAggs = buildEmptySubAggregations(); List buckets = new ArrayList<>(ranges.length); for (RangeAggregator.Range range : ranges) { - buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, keyed, format)); + buckets.add(factory.createBucket(range.key, range.originalFrom, range.originalTo, 0, subAggs, format)); } return factory.create(name, buckets, format, keyed, metadata()); } @@ -886,7 +877,7 @@ protected InternalAggregation adapt(InternalAggregation delegateResult) { Range r = ranges[i]; InternalFilters.InternalBucket b = filters.getBuckets().get(i); buckets.add( - rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, 
b.getDocCount(), b.getAggregations(), keyed, format) + rangeFactory.createBucket(r.getKey(), r.originalFrom, r.originalTo, b.getDocCount(), b.getAggregations(), format) ); } return rangeFactory.create(name(), buckets, format, keyed, filters.getMetadata()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java index 5c422a9dd4e32..6388eb3baaa84 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/AbstractInternalTerms.java @@ -66,6 +66,8 @@ public abstract static class AbstractTermsBucket> buckets @@ -369,7 +372,7 @@ protected static XContentBuilder doXContentCommon( builder.field(SUM_OF_OTHER_DOC_COUNTS.getPreferredName(), otherDocCount); builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (AbstractTermsBucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params, showDocCountError); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java index f179b7d05f9a4..3f75a27306ab4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedSignificantTerms.java @@ -134,7 +134,7 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th // There is a condition (presumably when only one shard has a bucket?) 
where reduce is not called // and I end up with buckets that contravene the user's min_doc_count criteria in my reducer if (bucket.subsetDf >= minDocCount) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java index 563321f56cb5f..5b9403840dfff 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalMappedTerms.java @@ -145,6 +145,6 @@ public int hashCode() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java index 6540cd2ee38da..64cebee880141 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalRareTerms.java @@ -81,14 +81,12 @@ public InternalAggregations getAggregations() { return aggregations; } - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; @@ -160,7 +158,7 @@ protected static XContentBuilder doXContentCommon(XContentBuilder builder, Param throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java index eeb7305ac51fa..3f579947248bb 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalSignificantTerms.java @@ -157,8 +157,7 @@ public int hashCode() { return Objects.hash(getClass(), aggregations, score, format); } - @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + final void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -166,7 +165,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) builder.field(BG_COUNT, supersetDf); aggregations.toXContentInternal(builder, 
params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 8e25c164d5f33..b94b1f5ea40b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -136,7 +136,7 @@ public void setAggregations(InternalAggregations aggregations) { } @Override - public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public final void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); keyToXContent(builder); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); @@ -145,7 +145,6 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) } aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } protected abstract XContentBuilder keyToXContent(XContentBuilder builder) throws IOException; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java index 7755f1db6a3ee..8047d1f06990f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java @@ -111,7 +111,7 @@ public boolean canLeadReduction() { @Override public final XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, 0L, 0, Collections.emptyList()); + return doXContentCommon(builder, params, false, 0L, 0, Collections.emptyList()); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 51796f404c283..d54ac9c66d9a5 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -639,7 +639,6 @@ public void testMergeAggs() throws InterruptedException { 10000D, count, InternalAggregations.EMPTY, - false, DocValueFormat.RAW ); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap()); @@ -1498,15 +1497,7 @@ private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Inde private static InternalAggregations createDeterminsticAggregation(String maxAggName, String rangeAggName, double value, int count) { Max max = new Max(maxAggName, value, DocValueFormat.RAW, Collections.emptyMap()); InternalDateRange.Factory factory = new InternalDateRange.Factory(); - InternalDateRange.Bucket bucket = factory.createBucket( - "bucket", - 0D, - 10000D, - count, - InternalAggregations.EMPTY, - false, - DocValueFormat.RAW - ); + InternalDateRange.Bucket bucket = factory.createBucket("bucket", 0D, 10000D, count, InternalAggregations.EMPTY, DocValueFormat.RAW); InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, 
emptyMap()); InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max)); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java index bd423999722f3..c9185fe35e677 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationsTests.java @@ -137,17 +137,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -157,17 +155,15 @@ protected AggregatorReducer getLeaderReducer(AggregationReduceContext reduceCont new InternalFiltersForF2( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, @@ -192,17 +188,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k1k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k1k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ), new InternalFilters.InternalBucket( "f1k2", @@ -212,17 +206,15 @@ InternalAggregations reduced(int k1, int k2, int k1k1, int k1k2, int k2k1, int k new InternalFilters( "f2", List.of( - new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY, true, true), - new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY, true, true) + new InternalFilters.InternalBucket("f2k1", k2k1, InternalAggregations.EMPTY), + new InternalFilters.InternalBucket("f2k2", k2k2, InternalAggregations.EMPTY) ), true, true, null ) ) - ), - true, - true + ) ) ), true, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java index c300bfed5f62a..ad2543548dcae 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFiltersTests.java @@ -59,10 +59,9 @@ public void setUp() throws Exception { @Override protected InternalFilters createTestInstance(String name, Map metadata, InternalAggregations aggregations) { final List buckets = new ArrayList<>(); - for (int i = 0; i < keys.size(); ++i) { - String key = keys.get(i); + for (String key : keys) { int 
docCount = randomIntBetween(0, 1000); - buckets.add(new InternalFilters.InternalBucket(key, docCount, aggregations, keyed, keyedBucket)); + buckets.add(new InternalBucket(key, docCount, aggregations)); } return new InternalFilters(name, buckets, keyed, keyedBucket, metadata); } @@ -94,7 +93,7 @@ protected InternalFilters mutateInstance(InternalFilters instance) { case 0 -> name += randomAlphaOfLength(5); case 1 -> { buckets = new ArrayList<>(buckets); - buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY, keyed, keyedBucket)); + buckets.add(new InternalBucket("test", randomIntBetween(0, 1000), InternalAggregations.EMPTY)); } default -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java index 9e6829139d772..5eb1500e37269 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java @@ -106,7 +106,7 @@ private InternalDateHistogram createTestInstance( // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { long key = startingDate + intervalMillis * i; - buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, aggregations)); + buckets.add(new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -181,13 +181,7 @@ protected InternalDateHistogram mutateInstance(InternalDateHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalDateHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java index db93bc5dfe179..f97a836712e36 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java @@ -74,7 +74,7 @@ protected InternalHistogram createTestInstance(String name, Map // rarely leave some holes to be filled up with empty buckets in case minDocCount is set to 0 if (frequently()) { final int docCount = TestUtil.nextInt(random(), 1, 50); - buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, aggregations)); + buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, format, aggregations)); } } BucketOrder order = BucketOrder.key(randomBoolean()); @@ -96,7 +96,7 @@ public void testHandlesNaN() { newBuckets.addAll(buckets.subList(0, buckets.size() - 1)); } InternalHistogram.Bucket b = buckets.get(buckets.size() - 1); - newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, keyed, b.format, b.aggregations)); + newBuckets.add(new InternalHistogram.Bucket(Double.NaN, b.docCount, b.format, 
b.aggregations)); List reduceMe = List.of(histogram, histogram2); InternalAggregationTestCase.reduce(reduceMe, mockReduceContext(mockBuilder(reduceMe)).forPartialReduction()); @@ -171,13 +171,7 @@ protected InternalHistogram mutateInstance(InternalHistogram instance) { case 1 -> { buckets = new ArrayList<>(buckets); buckets.add( - new InternalHistogram.Bucket( - randomNonNegativeLong(), - randomIntBetween(1, 100), - keyed, - format, - InternalAggregations.EMPTY - ) + new InternalHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, InternalAggregations.EMPTY) ); } case 2 -> order = BucketOrder.count(randomBoolean()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java index 5ca78f322491b..dc5b57619676e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefixTests.java @@ -75,16 +75,7 @@ private InternalIpPrefix createTestInstance( BytesRef key = itr.next(); boolean v6 = InetAddressPoint.decode(key.bytes) instanceof Inet6Address; buckets.add( - new InternalIpPrefix.Bucket( - DocValueFormat.IP, - key, - keyed, - v6, - prefixLength, - appendPrefixLength, - randomLongBetween(0, Long.MAX_VALUE), - aggregations - ) + new InternalIpPrefix.Bucket(key, v6, prefixLength, appendPrefixLength, randomLongBetween(0, Long.MAX_VALUE), aggregations) ); } @@ -126,7 +117,6 @@ protected void assertReduced(InternalIpPrefix reduced, List in Map expectedCounts = new HashMap<>(); for (InternalIpPrefix i : inputs) { for (InternalIpPrefix.Bucket b : i.getBuckets()) { - assertThat(b.getFormat(), equalTo(DocValueFormat.IP)); long acc = expectedCounts.getOrDefault(b.getKey(), 0L); acc += b.getDocCount(); expectedCounts.put(b.getKey(), acc); @@ -146,20 +136,16 @@ protected void assertReduced(InternalIpPrefix reduced, List in public void testPartialReduceNoMinDocCount() { InternalIpPrefix.Bucket b1 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("192.168.0.1"))), false, - false, 1, false, 1, InternalAggregations.EMPTY ); InternalIpPrefix.Bucket b2 = new InternalIpPrefix.Bucket( - DocValueFormat.IP, new BytesRef(InetAddressPoint.encode(InetAddresses.forString("200.0.0.1"))), false, - false, 1, false, 2, diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java index b888e61e1bbf9..383065193c4d5 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRangeTests.java @@ -72,7 +72,7 @@ protected InternalBinaryRange createTestInstance( for (int i = 0; i < ranges.size(); ++i) { final int docCount = randomIntBetween(1, 100); final String key = (i == nullKey) ? 
null : randomAlphaOfLength(10); - buckets.add(new InternalBinaryRange.Bucket(format, keyed, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); + buckets.add(new InternalBinaryRange.Bucket(format, key, ranges.get(i).v1(), ranges.get(i).v2(), docCount, aggregations)); } return new InternalBinaryRange(name, format, keyed, buckets, metadata); } @@ -113,7 +113,6 @@ protected InternalBinaryRange mutateInstance(InternalBinaryRange instance) { buckets.add( new InternalBinaryRange.Bucket( format, - keyed, "range_a", new BytesRef(randomAlphaOfLengthBetween(1, 20)), new BytesRef(randomAlphaOfLengthBetween(1, 20)), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java index 255ad7c4417b3..fdfffaf8fb8e7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalDateRangeTests.java @@ -81,7 +81,7 @@ protected InternalDateRange createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalDateRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalDateRange(name, buckets, format, keyed, metadata); } @@ -105,9 +105,7 @@ protected InternalDateRange mutateInstance(InternalDateRange instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalDateRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java index 49144ec2f40fb..dcb41322a9426 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalGeoDistanceTests.java @@ -63,7 +63,7 @@ protected InternalGeoDistance createTestInstance( int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations, keyed)); + buckets.add(new InternalGeoDistance.Bucket("range_" + i, from, to, docCount, aggregations)); } return new InternalGeoDistance(name, buckets, keyed, metadata); } @@ -86,9 +86,7 @@ protected InternalGeoDistance mutateInstance(InternalGeoDistance instance) { buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false) - ); + buckets.add(new InternalGeoDistance.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java index da0fbd94d6ed6..0d957255b6416 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/InternalRangeTests.java @@ -76,7 +76,7 @@ public void setUp() throws Exception { int docCount = randomIntBetween(0, 1000); double from = range.v1(); double to = range.v2(); - buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, keyed, format)); + buckets.add(new InternalRange.Bucket("range_" + i, from, to, docCount, aggregations, format)); } return new InternalRange<>(name, buckets, format, keyed, metadata); } @@ -100,9 +100,7 @@ protected Class interna buckets = new ArrayList<>(buckets); double from = randomDouble(); double to = from + randomDouble(); - buckets.add( - new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, false, format) - ); + buckets.add(new InternalRange.Bucket("range_a", from, to, randomNonNegativeLong(), InternalAggregations.EMPTY, format)); } case 3 -> { if (metadata == null) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java index 9f667b3efcb61..b2f79c02baf8d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpersTests.java @@ -81,11 +81,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return new Object[0]; @@ -161,11 +156,6 @@ public InternalAggregations getAggregations() { return null; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return null; - } - @Override public Object getProperty(String containingAggName, List path) { return mock(InternalTDigestPercentiles.class); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java index 0f732d2017c74..c6bfb5b1b2778 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/multiterms/InternalMultiTerms.java @@ -122,17 +122,16 @@ public InternalAggregations getAggregations() { } @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + public void bucketToXContent(XContentBuilder builder, Params params, boolean showDocCountError) throws IOException { builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), getKey()); builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), getKeyAsString()); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); - if (getShowDocCountError()) { + if (showDocCountError) { builder.field(DOC_COUNT_ERROR_UPPER_BOUND_FIELD_NAME.getPreferredName(), getDocCountError()); } aggregations.toXContentInternal(builder, params); builder.endObject(); - 
return builder; } @Override @@ -589,7 +588,7 @@ public List getBuckets() { @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - return doXContentCommon(builder, params, docCountError, otherDocCount, buckets); + return doXContentCommon(builder, params, showTermDocCountError, docCountError, otherDocCount, buckets); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java index 7ef7a8f4e6dd5..95b6a18182f9b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregation.java @@ -142,8 +142,7 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + private void bucketToXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(CommonFields.DOC_COUNT.getPreferredName(), serializableCategory.getNumMatches()); builder.field(CommonFields.KEY.getPreferredName()); @@ -152,7 +151,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(CategoryDefinition.MAX_MATCHING_LENGTH.getPreferredName(), serializableCategory.maxMatchingStringLen()); aggregations.toXContentInternal(builder, params); builder.endObject(); - return builder; } BucketKey getRawKey() { @@ -280,7 +278,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); for (Bucket bucket : buckets) { - bucket.toXContent(builder, params); + bucket.bucketToXContent(builder, params); } builder.endArray(); return builder; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java index c97166ac6fd80..39bdb69d4da40 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointBucket.java @@ -12,12 +12,13 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; -public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket { +public class ChangePointBucket extends InternalMultiBucketAggregation.InternalBucket implements ToXContent { private final Object key; private final long docCount; private final InternalAggregations aggregations; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 561076c302eda..1604c47ac4754 
100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -36,7 +36,7 @@ public final class AggregationTestUtils { private AggregationTestUtils() {} static InternalHistogram.Bucket createHistogramBucket(long timestamp, long docCount, List subAggregations) { - return new InternalHistogram.Bucket(timestamp, docCount, false, DocValueFormat.RAW, createAggs(subAggregations)); + return new InternalHistogram.Bucket(timestamp, docCount, DocValueFormat.RAW, createAggs(subAggregations)); } static InternalComposite.InternalBucket createCompositeBucket( diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index ba25a774ff540..e33c1cc30f355 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -444,20 +444,14 @@ private static InternalAggregation unrollMultiBucket( long key = ((InternalDateHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalDateHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalDateHistogram.Bucket( - key, - bucketCount, - ((InternalDateHistogram.Bucket) bucket).getKeyed(), - formatter, - subAggs - ); + return new InternalDateHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof InternalHistogram) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { long key = ((InternalHistogram) rolled).getKey(bucket).longValue(); DocValueFormat formatter = ((InternalHistogram.Bucket) bucket).getFormatter(); assert bucketCount >= 0; - return new InternalHistogram.Bucket(key, bucketCount, ((InternalHistogram.Bucket) bucket).getKeyed(), formatter, subAggs); + return new InternalHistogram.Bucket(key, bucketCount, formatter, subAggs); }); } else if (rolled instanceof StringTerms) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 411a4cda868f0..f9fed2b8f6a7d 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -38,7 +38,6 @@ import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.ql.execution.search.FieldExtraction; import org.elasticsearch.xpack.ql.execution.search.extractor.AbstractFieldHitExtractor; import org.elasticsearch.xpack.ql.execution.search.extractor.BucketExtractor; @@ -360,11 +359,6 @@ static class ImplicitGroupActionListener extends BaseAggActionListener { private static final List EMPTY_BUCKET = singletonList(new Bucket() { - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new 
SqlIllegalArgumentException("No group-by/aggs defined"); - } - @Override public Object getKey() { throw new SqlIllegalArgumentException("No group-by/aggs defined"); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java index cb832cbd4b2d4..8f8f5917ae123 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/extractor/TestBucket.java @@ -8,9 +8,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation.Bucket; -import org.elasticsearch.xcontent.XContentBuilder; -import java.io.IOException; import java.util.Map; class TestBucket implements Bucket { @@ -25,11 +23,6 @@ class TestBucket implements Bucket { this.aggs = aggs; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public Map getKey() { return key; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java index 681ec38e9a57a..7359071996cc8 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/AggregationResultUtilsTests.java @@ -918,14 +918,14 @@ public void testRangeAggExtractor() { Aggregation agg = createRangeAgg( "p_agg", List.of( - new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket(null, -11.0, 0, 0, InternalAggregations.EMPTY, false, DocValueFormat.RAW), - new InternalRange.Bucket("custom-0", 0, 10, 777, InternalAggregations.EMPTY, false, DocValueFormat.RAW) + new InternalRange.Bucket(null, Double.NEGATIVE_INFINITY, 10.5, 10, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 10.5, 19.5, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 19.5, 200, 30, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, 20, Double.POSITIVE_INFINITY, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -10, -5, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, -6.0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket(null, -11.0, 0, 0, InternalAggregations.EMPTY, DocValueFormat.RAW), + new InternalRange.Bucket("custom-0", 0, 10, 777, 
InternalAggregations.EMPTY, DocValueFormat.RAW) ) ); assertThat( From 546e8e91e6f6f5cd5e5343a0874239026701fff4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20Fred=C3=A9n?= <109296772+jfreden@users.noreply.github.com> Date: Fri, 22 Nov 2024 08:20:35 +0100 Subject: [PATCH 26/50] Bump major version for feature migration system indices (#117243) * Bump major version for feature upgrade system indices --- docs/changelog/117243.yaml | 5 +++++ .../elasticsearch/migration/FeatureMigrationIT.java | 8 ++++---- .../migration/MultiFeatureMigrationIT.java | 10 +++++----- .../TransportGetFeatureUpgradeStatusAction.java | 8 ++------ .../java/org/elasticsearch/indices/SystemIndices.java | 2 +- 5 files changed, 17 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/117243.yaml diff --git a/docs/changelog/117243.yaml b/docs/changelog/117243.yaml new file mode 100644 index 0000000000000..f871d476bd0ec --- /dev/null +++ b/docs/changelog/117243.yaml @@ -0,0 +1,5 @@ +pr: 117243 +summary: Bump major version for feature migration system indices +area: Infra/Core +type: upgrade +issues: [] diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 3905edae46c2f..a4aa0514bb47a 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -208,7 +208,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -216,7 +216,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -224,7 +224,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -232,7 +232,7 @@ public void testMigrateInternalManagedSystemIndex() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 1ee5519593569..3442e9dc43925 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -218,7 +218,7 @@ public void testMultipleFeatureMigration() throws Exception { // Finally, verify that all the indices exist and have the properties we expect. 
assertIndexHasCorrectProperties( finalMetadata, - ".int-man-old-reindexed-for-8", + ".int-man-old-reindexed-for-9", INTERNAL_MANAGED_FLAG_VALUE, true, true, @@ -226,7 +226,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".int-unman-old-reindexed-for-8", + ".int-unman-old-reindexed-for-9", INTERNAL_UNMANAGED_FLAG_VALUE, false, true, @@ -234,7 +234,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-man-old-reindexed-for-8", + ".ext-man-old-reindexed-for-9", EXTERNAL_MANAGED_FLAG_VALUE, true, false, @@ -242,7 +242,7 @@ public void testMultipleFeatureMigration() throws Exception { ); assertIndexHasCorrectProperties( finalMetadata, - ".ext-unman-old-reindexed-for-8", + ".ext-unman-old-reindexed-for-9", EXTERNAL_UNMANAGED_FLAG_VALUE, false, false, @@ -251,7 +251,7 @@ public void testMultipleFeatureMigration() throws Exception { assertIndexHasCorrectProperties( finalMetadata, - ".second-int-man-old-reindexed-for-8", + ".second-int-man-old-reindexed-for-9", SECOND_FEATURE_IDX_FLAG_VALUE, true, true, diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java index e2475bca31d53..afe615add28df 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportGetFeatureUpgradeStatusAction.java @@ -19,7 +19,6 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.indices.SystemIndices; @@ -56,15 +55,13 @@ public class TransportGetFeatureUpgradeStatusAction extends TransportMasterNodeA /** * Once all feature migrations for 8.x -> 9.x have been tested, we can bump this to Version.V_8_0_0 */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) - public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_7_0_0; - public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_7_0_0; + public static final Version NO_UPGRADE_REQUIRED_VERSION = Version.V_8_0_0; + public static final IndexVersion NO_UPGRADE_REQUIRED_INDEX_VERSION = IndexVersions.V_8_0_0; private final SystemIndices systemIndices; PersistentTasksService persistentTasksService; @Inject - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // Once we begin working on 9.x, we need to update our migration classes public TransportGetFeatureUpgradeStatusAction( TransportService transportService, ThreadPool threadPool, @@ -149,7 +146,6 @@ static GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus getFeatureUpgradeSta .map(idxInfo -> ERROR) .map(idxStatus -> GetFeatureUpgradeStatusResponse.UpgradeStatus.combine(idxStatus, initialStatus)) .orElse(initialStatus); - return new GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus(featureName, minimumVersion, status, indexInfos); } diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java index 856b30d1c19e8..42cda4da1a9e6 100644 
--- a/server/src/main/java/org/elasticsearch/indices/SystemIndices.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndices.java @@ -110,7 +110,7 @@ public class SystemIndices { public static final String SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_system_index_access_allowed"; public static final String EXTERNAL_SYSTEM_INDEX_ACCESS_CONTROL_HEADER_KEY = "_external_system_index_access_origin"; - public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-8"; + public static final String UPGRADED_INDEX_SUFFIX = "-reindexed-for-9"; private static final Automaton EMPTY = Automata.makeEmpty(); From 2ac267de3a0f14f62d426ae47b4dc1adc1fe9161 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Fri, 22 Nov 2024 10:02:03 +0100 Subject: [PATCH 27/50] [ci] Add debian-12 to matrix in packaging and platform jobs (#116172) Lintian test has been changed to parse the result instead of using exit code. This was required, because now `mismatched-override` is non-erasable tag which cannot be ignored for exit code. Lintian introduced non-backward-compatible format change for overrides file. Because of that, some overrides are now duplicated in a format for older versions. Additionally, Lintian overrides file has been cleaned up to remove the tags which are no longer failing. --- .../pipelines/periodic-packaging.template.yml | 1 + .buildkite/pipelines/periodic-packaging.yml | 1 + .../pipelines/periodic-platform-support.yml | 1 + .../pull-request/packaging-tests-unix.yml | 3 + distribution/packages/build.gradle | 1 - .../packages/src/deb/lintian/elasticsearch | 57 ++++++++++--------- .../packaging/test/DebMetadataTests.java | 40 ++++++++++--- .../packaging/util/LintianResultParser.java | 54 ++++++++++++++++++ 8 files changed, 122 insertions(+), 36 deletions(-) create mode 100644 qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java diff --git a/.buildkite/pipelines/periodic-packaging.template.yml b/.buildkite/pipelines/periodic-packaging.template.yml index 081d059460653..1a1e46d55f7a4 100644 --- a/.buildkite/pipelines/periodic-packaging.template.yml +++ b/.buildkite/pipelines/periodic-packaging.template.yml @@ -8,6 +8,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index 162a7e4995467..a49e486176484 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -9,6 +9,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index f9f75488f0917..79e5a2e8dcdbb 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -8,6 +8,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index 04ccc41891b3b..8bec706bb758d 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -11,6 +11,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 @@ -38,6 +39,7 @@ steps: setup: image: - 
debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 @@ -65,6 +67,7 @@ steps: setup: image: - debian-11 + - debian-12 - opensuse-leap-15 - oraclelinux-7 - oraclelinux-8 diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 918980fea616a..7d60137ac86b1 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -335,7 +335,6 @@ Closure commonDebConfig(String architecture) { // versions found on oldest supported distro, centos-6 requires('bash', '4.1', GREATER | EQUAL) - requires('lsb-base', '4', GREATER | EQUAL) requires 'libc6' requires 'adduser' diff --git a/distribution/packages/src/deb/lintian/elasticsearch b/distribution/packages/src/deb/lintian/elasticsearch index edd705b66caaa..1622d8d8aeb40 100644 --- a/distribution/packages/src/deb/lintian/elasticsearch +++ b/distribution/packages/src/deb/lintian/elasticsearch @@ -5,8 +5,6 @@ changelog-file-missing-in-native-package # we intentionally copy our copyright file for all deb packages -copyright-file-contains-full-apache-2-license -copyright-not-using-common-license-for-apache2 copyright-without-copyright-notice # we still put all our files under /usr/share/elasticsearch even after transition to platform dependent packages @@ -16,37 +14,23 @@ arch-dependent-file-in-usr-share missing-dep-on-jarwrapper # we prefer to not make our config and log files world readable -non-standard-file-perm etc/default/elasticsearch 0660 != 0644 -non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 -non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 -non-standard-file-perm etc/elasticsearch/* -non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 -non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 - -# this lintian tag is simply wrong; contrary to the explanation, Debian systemd -# does actually look at /usr/lib/systemd/system -systemd-service-file-outside-lib usr/lib/systemd/system/elasticsearch.service +non-standard-file-perm 0660 != 0644 [etc/default/elasticsearch] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [etc/elasticsearch/jvm.options.d/] +non-standard-file-perm 0660 != 0644 [etc/elasticsearch/*] +non-standard-dir-perm 2750 != 0755 [var/lib/elasticsearch/] +non-standard-dir-perm 2750 != 0755 [var/log/elasticsearch/] # the package scripts handle systemd directly and don't need to use deb helpers maintainer-script-calls-systemctl # bundled JDK embedded-library -unstripped-binary-or-object usr/share/elasticsearch/jdk/* -extra-license-file usr/share/elasticsearch/jdk/legal/* -hardening-no-pie usr/share/elasticsearch/jdk/bin/* -hardening-no-pie usr/share/elasticsearch/jdk/lib/* +unstripped-binary-or-object [usr/share/elasticsearch/jdk/*] # the system java version that lintian assumes is far behind what elasticsearch uses unknown-java-class-version -# elastic licensed modules contain elastic license -extra-license-file usr/share/elasticsearch/modules/* - -# This dependency appears to have a packaging flaw, and includes a -# generated source file alongside the compiled version -jar-contains-source usr/share/elasticsearch/modules/repository-gcs/api-common*.jar * - # There's no `License` field in Debian control files, but earlier versions # of `lintian` were more permissive. Override this warning so that we can # run `lintian` on different releases of Debian. The format of this override @@ -58,8 +42,27 @@ unknown-field License # indirectly to libc via libdl. 
This might not be best practice but we # don't build them ourselves and the license precludes us modifying them # to fix this. -library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so +library-not-linked-against-libc [usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so*] + + +# Below is the copy of some of the above rules in format for Lintian versions <= 2.104 (Debian 11) +# Override syntax changes between Lintian versions in a non-backwards compatible way, so we handle it with +# duplication and ignoring some issues in the test code. + + +# we prefer to not make our config and log files world readable +non-standard-file-perm etc/default/elasticsearch 0660 != 0644 +non-standard-dir-perm etc/elasticsearch/ 2750 != 0755 +non-standard-dir-perm etc/elasticsearch/jvm.options.d/ 2750 != 0755 +non-standard-file-perm etc/elasticsearch/* +non-standard-dir-perm var/lib/elasticsearch/ 2750 != 0755 +non-standard-dir-perm var/log/elasticsearch/ 2750 != 0755 -# shared-lib-without-dependency-information (now shared-library-lacks-prerequisites) is falsely reported for libvec.so -# which has no dependencies (not even libc) besides the symbols in the base executable. -shared-lib-without-dependency-information usr/share/elasticsearch/lib/platform/linux-x64/libvec.so +# bundled JDK +unstripped-binary-or-object usr/share/elasticsearch/jdk/* + +# Intel MKL libraries are not linked directly to libc. They are linked +# indirectly to libc via libdl. This might not be best practice but we +# don't build them ourselves and the license precludes us modifying them +# to fix this. +library-not-linked-against-libc usr/share/elasticsearch/modules/x-pack-ml/platform/linux-x86_64/lib/libmkl_*.so* diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java index a60e58c34918b..9f9aa78a4910b 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DebMetadataTests.java @@ -12,18 +12,31 @@ import junit.framework.TestCase; import org.elasticsearch.packaging.util.Distribution; -import org.elasticsearch.packaging.util.FileUtils; +import org.elasticsearch.packaging.util.LintianResultParser; +import org.elasticsearch.packaging.util.LintianResultParser.Issue; +import org.elasticsearch.packaging.util.LintianResultParser.Result; import org.elasticsearch.packaging.util.Shell; import org.junit.BeforeClass; +import java.util.List; import java.util.Locale; import java.util.regex.Pattern; +import java.util.stream.Collectors; import static org.elasticsearch.packaging.util.FileUtils.getDistributionFile; import static org.junit.Assume.assumeTrue; public class DebMetadataTests extends PackagingTestCase { + private final LintianResultParser lintianParser = new LintianResultParser(); + private static final List IGNORED_TAGS = List.of( + // Override syntax changes between lintian versions in a non-backwards compatible way, so we have to tolerate these. + // Tag mismatched-override is a non-erasable tag which cannot be ignored with overrides, so we handle it here. 
+ "mismatched-override", + // systemd-service-file-outside-lib has been incorrect and removed in the newer version on Lintian + "systemd-service-file-outside-lib" + ); + @BeforeClass public static void filterDistros() { assumeTrue("only deb", distribution.packaging == Distribution.Packaging.DEB); @@ -35,15 +48,26 @@ public void test05CheckLintian() { if (helpText.contains("--fail-on-warnings")) { extraArgs = "--fail-on-warnings"; } else if (helpText.contains("--fail-on error")) { - extraArgs = "--fail-on warning"; - // Recent lintian versions are picky about malformed or mismatched overrides. - // Unfortunately override syntax changes between lintian versions in a non-backwards compatible - // way, so we have to tolerate these (or maintain separate override files per lintian version). - if (helpText.contains("--suppress-tags")) { - extraArgs += " --suppress-tags malformed-override,mismatched-override"; + extraArgs = "--fail-on error,warning"; + } + Shell.Result result = sh.runIgnoreExitCode( + String.format(Locale.ROOT, "lintian %s %s", extraArgs, getDistributionFile(distribution())) + ); + Result lintianResult = lintianParser.parse(result.stdout()); + // Unfortunately Lintian overrides syntax changes between Lintian versions in a non-backwards compatible + // way, so we have to manage some exclusions outside the overrides file. + if (lintianResult.isSuccess() == false) { + List importantIssues = lintianResult.issues() + .stream() + .filter(issue -> IGNORED_TAGS.contains(issue.tag()) == false) + .toList(); + if (importantIssues.isEmpty() == false) { + fail( + "Issues for DEB package found by Lintian:\n" + + importantIssues.stream().map(Record::toString).collect(Collectors.joining("\n")) + ); } } - sh.run(String.format(Locale.ROOT, "lintian %s %s", extraArgs, FileUtils.getDistributionFile(distribution()))); } public void test06Dependencies() { diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java new file mode 100644 index 0000000000000..511080427ea77 --- /dev/null +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/LintianResultParser.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
+package org.elasticsearch.packaging.util;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class LintianResultParser {
+
+    private static final Logger logger = LogManager.getLogger(LintianResultParser.class);
+    private static final Pattern RESULT_PATTERN = Pattern.compile("(?<severity>[EW]): (?<package>\\S+): (?<tag>\\S+) (?<message>.+)");
+
+    public Result parse(String output) {
+        String[] lines = output.split("\n");
+        List<Issue> issues = Arrays.stream(lines).map(line -> {
+            Matcher matcher = RESULT_PATTERN.matcher(line);
+            if (matcher.matches() == false) {
+                logger.info("Lintian output not matching expected pattern: {}", line);
+                return null;
+            }
+            Severity severity = switch (matcher.group("severity")) {
+                case "E" -> Severity.ERROR;
+                case "W" -> Severity.WARNING;
+                default -> Severity.UNKNOWN;
+            };
+            return new Issue(severity, matcher.group("tag"), matcher.group("message"));
+        }).filter(Objects::nonNull).toList();
+
+        return new Result(issues.stream().noneMatch(it -> it.severity == Severity.ERROR || it.severity == Severity.WARNING), issues);
+    }
+
+    public record Result(boolean isSuccess, List<Issue> issues) {}
+
+    public record Issue(Severity severity, String tag, String message) {}
+
+    enum Severity {
+        ERROR,
+        WARNING,
+        UNKNOWN
+    }
+}

From 6ea3e01958cfe355475fe605a49fb731294a8c2b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Slobodan=20Adamovi=C4=87?=
Date: Fri, 22 Nov 2024 11:39:25 +0100
Subject: [PATCH 28/50] Upgrade Bouncy Castle FIPS dependencies (#112989)

This PR updates `bc-fips` and `bctls-fips` dependencies to the latest minor versions.
---
 .../src/main/groovy/elasticsearch.fips.gradle  | 8 ++++----
 .../src/main/resources/fips_java.policy        | 3 ++-
 distribution/tools/plugin-cli/build.gradle     | 2 +-
 docs/changelog/112989.yaml                     | 5 +++++
 .../security/fips-140-compliance.asciidoc      | 4 ++--
 gradle/verification-metadata.xml               | 18 +++++++++---------
 plugins/discovery-ec2/build.gradle             | 1 +
 .../src/main/resources/fips/fips_java.policy   | 3 ++-
 x-pack/plugin/core/build.gradle                | 2 +-
 .../core/ssl/RestrictedTrustManagerTests.java  | 2 +-
 .../ssl/SslClientAuthenticationTests.java      | 2 +-
 ...mpleSecurityNetty4ServerTransportTests.java | 6 +++++-
 12 files changed, 34 insertions(+), 22 deletions(-)
 create mode 100644 docs/changelog/112989.yaml

diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
index 493f7a505bb5b..3c9cf121813c9 100644
--- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
+++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle
@@ -25,12 +25,12 @@ if (buildParams.inFipsJvm) {
   File fipsSecurity = new File(fipsResourcesDir, javaSecurityFilename)
   File fipsPolicy = new File(fipsResourcesDir, 'fips_java.policy')
   File fipsTrustStore = new File(fipsResourcesDir, 'cacerts.bcfks')
-  def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.4')
-  def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.17')
+  def bcFips = dependencies.create('org.bouncycastle:bc-fips:1.0.2.5')
+  def bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19')
   def manualDebug = false; //change this to manually debug bouncy castle in an IDE
   if(manualDebug) {
     bcFips =
dependencies.create('org.bouncycastle:bc-fips-debug:1.0.2.5') + bcTlsFips = dependencies.create('org.bouncycastle:bctls-fips:1.0.19'){ exclude group: 'org.bouncycastle', module: 'bc-fips' // to avoid jar hell } } diff --git a/build-tools-internal/src/main/resources/fips_java.policy b/build-tools-internal/src/main/resources/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/build-tools-internal/src/main/resources/fips_java.policy +++ b/build-tools-internal/src/main/resources/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ grant { }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index ac8ade89c9014..57750f2162a71 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -29,7 +29,7 @@ dependencies { implementation 'org.ow2.asm:asm-tree:9.7' api "org.bouncycastle:bcpg-fips:1.0.7.1" - api "org.bouncycastle:bc-fips:1.0.2.4" + api "org.bouncycastle:bc-fips:1.0.2.5" testImplementation project(":test:framework") testImplementation "com.google.jimfs:jimfs:${versions.jimfs}" testRuntimeOnly "com.google.guava:guava:${versions.jimfs_guava}" diff --git a/docs/changelog/112989.yaml b/docs/changelog/112989.yaml new file mode 100644 index 0000000000000..364f012f94420 --- /dev/null +++ b/docs/changelog/112989.yaml @@ -0,0 +1,5 @@ +pr: 112989 +summary: Upgrade Bouncy Castle FIPS dependencies +area: Security +type: upgrade +issues: [] diff --git a/docs/reference/security/fips-140-compliance.asciidoc b/docs/reference/security/fips-140-compliance.asciidoc index 5bf73d43541d6..dec17927e62b8 100644 --- a/docs/reference/security/fips-140-compliance.asciidoc +++ b/docs/reference/security/fips-140-compliance.asciidoc @@ -53,8 +53,8 @@ https://docs.oracle.com/en/java/javase/17/security/java-cryptography-architectur https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html[JSSE] implementation is required so that the JVM uses FIPS validated implementations of NIST recommended cryptographic algorithms. -Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.4/bc-fips-1.0.2.4.jar[bc-fips 1.0.2.4] -and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.17/bctls-fips-1.0.17.jar[bctls-fips 1.0.17]. +Elasticsearch has been tested with Bouncy Castle's https://repo1.maven.org/maven2/org/bouncycastle/bc-fips/1.0.2.5/bc-fips-1.0.2.5.jar[bc-fips 1.0.2.5] +and https://repo1.maven.org/maven2/org/bouncycastle/bctls-fips/1.0.19/bctls-fips-1.0.19.jar[bctls-fips 1.0.19]. 
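For readers who want to experiment with the upgraded provider outside {es}: a minimal, self-contained sketch, assuming only that bc-fips 1.0.2.5 is on the classpath. This is not how {es} wires the provider — it does so through the JVM's java.security configuration, not in code.

    import java.security.Security;

    import org.bouncycastle.crypto.CryptoServicesRegistrar;
    import org.bouncycastle.jcajce.provider.BouncyCastleFipsProvider;

    public class FipsProviderCheck {
        public static void main(String[] args) {
            // Install BCFIPS at the highest priority so JCA lookups resolve to it.
            Security.insertProviderAt(new BouncyCastleFipsProvider(), 1);
            // Approved-only mode is normally switched on with
            // -Dorg.bouncycastle.fips.approved_only=true; just report its state here.
            System.out.println("approved-only: " + CryptoServicesRegistrar.isInApprovedOnlyMode());
        }
    }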
Please refer to the {es} https://www.elastic.co/support/matrix#matrix_jvm[JVM support matrix] for details on which combinations of JVM and security provider are supported in FIPS mode. Elasticsearch does not ship with a FIPS certified provider. It is the responsibility of the user to install and configure the security provider to ensure compliance with FIPS 140-2. Using a FIPS certified provider will ensure that only diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 3f56071f6f495..2f465e06a662a 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -3288,14 +3288,14 @@ - - - + + + - - - + + + @@ -3333,9 +3333,9 @@ - - - + + + diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a166a89ad4026..f281db5279660 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -77,6 +77,7 @@ tasks.register("writeTestJavaPolicy") { "permission java.security.SecurityPermission \"getProperty.jdk.tls.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.jdk.certpath.disabledAlgorithms\";", "permission java.security.SecurityPermission \"getProperty.keystore.type.compat\";", + "permission java.security.SecurityPermission \"getProperty.org.bouncycastle.ec.max_f2m_field_size\";", "};" ].join("\n") ) diff --git a/test/test-clusters/src/main/resources/fips/fips_java.policy b/test/test-clusters/src/main/resources/fips/fips_java.policy index c259b0bc908d8..781e1247db7a5 100644 --- a/test/test-clusters/src/main/resources/fips/fips_java.policy +++ b/test/test-clusters/src/main/resources/fips/fips_java.policy @@ -5,6 +5,7 @@ grant { permission java.security.SecurityPermission "getProperty.jdk.tls.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.certpath.disabledAlgorithms"; permission java.security.SecurityPermission "getProperty.jdk.tls.server.defaultDHEParameters"; + permission java.security.SecurityPermission "getProperty.org.bouncycastle.ec.max_f2m_field_size"; permission java.lang.RuntimePermission "getProtectionDomain"; permission java.util.PropertyPermission "java.runtime.name", "read"; permission org.bouncycastle.crypto.CryptoServicesPermission "tlsAlgorithmsEnabled"; @@ -20,6 +21,6 @@ grant { }; // rely on the caller's socket permissions, the JSSE TLS implementation here is always allowed to connect -grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.17.jar" { +grant codeBase "file:${jdk.module.path}/bctls-fips-1.0.19.jar" { permission java.net.SocketPermission "*", "connect"; }; diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index b4f17cb436df5..d4c3f67bf3ebb 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -65,7 +65,7 @@ dependencies { testImplementation project(path: ':modules:rest-root') testImplementation project(path: ':modules:health-shards-availability') // Needed for Fips140ProviderVerificationTests - testCompileOnly('org.bouncycastle:bc-fips:1.0.2.4') + testCompileOnly('org.bouncycastle:bc-fips:1.0.2.5') testImplementation(project(':x-pack:license-tools')) { transitive = false diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java index bbf80279b0b2a..60db8b6522518 100644 --- 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/RestrictedTrustManagerTests.java @@ -218,7 +218,7 @@ public void testThatDelegateTrustManagerIsRespected() throws Exception { if (cert.endsWith("/ca")) { assertTrusted(trustManager, cert); } else { - assertNotValid(trustManager, cert, inFipsJvm() ? "Unable to find certificate chain." : "PKIX path building failed.*"); + assertNotValid(trustManager, cert, inFipsJvm() ? "Unable to construct a valid chain" : "PKIX path building failed.*"); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java index bc01b0693af0a..2851af1461012 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/ssl/SslClientAuthenticationTests.java @@ -107,7 +107,7 @@ public void testThatHttpFailsWithoutSslClientAuth() throws IOException { if (inFipsJvm()) { Throwable t = ExceptionsHelper.unwrap(e, CertificateException.class); assertThat(t, instanceOf(CertificateException.class)); - assertThat(t.getMessage(), containsString("Unable to find certificate chain")); + assertThat(t.getMessage(), containsString("Unable to construct a valid chain")); } else { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); assertThat(t, instanceOf(CertPathBuilderException.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java index c5c5e14934408..e381663d4174e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SimpleSecurityNetty4ServerTransportTests.java @@ -571,7 +571,11 @@ public void testClientChannelUsesSeparateSslConfigurationForRemoteCluster() thro final ConnectTransportException e = openConnectionExpectFailure(qcService, node, connectionProfile); assertThat( e.getRootCause().getMessage(), - anyOf(containsString("unable to find valid certification path"), containsString("Unable to find certificate chain")) + anyOf( + containsString("unable to find valid certification path"), + containsString("Unable to find certificate chain"), + containsString("Unable to construct a valid chain") + ) ); } From a1247b3e60f7fd0df1a980a1d8a7a8a0e1760661 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Fri, 22 Nov 2024 12:12:09 +0100 Subject: [PATCH 29/50] ES|QL: fix validation of SORT by aggregate functions (#117316) --- docs/changelog/117316.yaml | 5 +++ .../kibana/definition/match_operator.json | 36 +++++++++++++++++++ .../functions/types/match_operator.asciidoc | 2 ++ .../xpack/esql/analysis/Verifier.java | 13 +++++++ .../xpack/esql/analysis/VerifierTests.java | 7 ++++ 5 files changed, 63 insertions(+) create mode 100644 docs/changelog/117316.yaml diff --git a/docs/changelog/117316.yaml b/docs/changelog/117316.yaml new file mode 100644 index 0000000000000..69474d68a8190 --- /dev/null +++ 
b/docs/changelog/117316.yaml @@ -0,0 +1,5 @@ +pr: 117316 +summary: Fix validation of SORT by aggregate functions +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/functions/kibana/definition/match_operator.json b/docs/reference/esql/functions/kibana/definition/match_operator.json index 2facebfc44e57..7a0ace6168b59 100644 --- a/docs/reference/esql/functions/kibana/definition/match_operator.json +++ b/docs/reference/esql/functions/kibana/definition/match_operator.json @@ -22,6 +22,42 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "field", + "type" : "keyword", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "text", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, + { + "params" : [ + { + "name" : "field", + "type" : "text", + "optional" : false, + "description" : "Field that the query will target." + }, + { + "name" : "query", + "type" : "keyword", + "optional" : false, + "description" : "Text you wish to find in the provided field." + } + ], + "variadic" : false, + "returnType" : "boolean" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/types/match_operator.asciidoc b/docs/reference/esql/functions/types/match_operator.asciidoc index 5c6afacdce1b2..7523b29c62b1d 100644 --- a/docs/reference/esql/functions/types/match_operator.asciidoc +++ b/docs/reference/esql/functions/types/match_operator.asciidoc @@ -6,5 +6,7 @@ |=== field | query | result keyword | keyword | boolean +keyword | text | boolean +text | keyword | boolean text | text | boolean |=== diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java index 694328e57b5ae..3ebb52641232e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Verifier.java @@ -215,6 +215,7 @@ else if (p instanceof Lookup lookup) { checkOperationsOnUnsignedLong(p, failures); checkBinaryComparison(p, failures); checkForSortableDataTypes(p, failures); + checkSort(p, failures); checkFullTextQueryFunctions(p, failures); }); @@ -232,6 +233,18 @@ else if (p instanceof Lookup lookup) { return failures; } + private void checkSort(LogicalPlan p, Set failures) { + if (p instanceof OrderBy ob) { + ob.order().forEach(o -> { + o.forEachDown(Function.class, f -> { + if (f instanceof AggregateFunction) { + failures.add(fail(f, "Aggregate functions are not allowed in SORT [{}]", f.functionName())); + } + }); + }); + } + } + private static void checkFilterConditionType(LogicalPlan p, Set localFailures) { if (p instanceof Filter f) { Expression condition = f.condition(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 8da6863465d39..53c6e9a5fd841 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1793,6 +1793,13 @@ public void testCategorizeWithinAggregations() { ); } + public void testSortByAggregate() { + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | 
SORT count(*)")); + assertEquals("1:28: Aggregate functions are not allowed in SORT [COUNT]", error("ROW a = 1 | SORT to_string(count(*))")); + assertEquals("1:22: Aggregate functions are not allowed in SORT [MAX]", error("ROW a = 1 | SORT 1 + max(a)")); + assertEquals("1:18: Aggregate functions are not allowed in SORT [COUNT]", error("FROM test | SORT count(*)")); + } + private void query(String query) { defaultAnalyzer.analyze(parser.createStatement(query)); } From bcb29b797d8dcb61aca7f2beb922839277326def Mon Sep 17 00:00:00 2001 From: Pooya Salehi Date: Fri, 22 Nov 2024 12:31:40 +0100 Subject: [PATCH 30/50] Preserve thread context when waiting for segment generation in RTG (#117148) The multi-get counterpart of https://github.com/elastic/elasticsearch/pull/114623. --- docs/changelog/117148.yaml | 5 +++++ .../action/get/TransportShardMultiGetAction.java | 9 +++++---- 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/117148.yaml diff --git a/docs/changelog/117148.yaml b/docs/changelog/117148.yaml new file mode 100644 index 0000000000000..92dd69672616a --- /dev/null +++ b/docs/changelog/117148.yaml @@ -0,0 +1,5 @@ +pr: 117148 +summary: Preserve thread context when waiting for segment generation in RTG +area: CRUD +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java index 633e7ef6793ab..93e1b18ec64c6 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportShardMultiGetAction.java @@ -19,6 +19,7 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.admin.indices.refresh.TransportShardRefreshAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.replication.BasicReplicationRequest; import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; @@ -280,15 +281,15 @@ private void tryShardMultiGetFromTranslog( } else { assert r.segmentGeneration() > -1L; assert r.primaryTerm() > Engine.UNKNOWN_PRIMARY_TERM; - indexShard.waitForPrimaryTermAndGeneration( - r.primaryTerm(), - r.segmentGeneration(), + final ActionListener termAndGenerationListener = ContextPreservingActionListener.wrapPreservingContext( listener.delegateFailureAndWrap( (ll, aLong) -> getExecutor(request, shardId).execute( ActionRunnable.supply(ll, () -> handleLocalGets(request, r.multiGetShardResponse(), shardId)) ) - ) + ), + threadPool.getThreadContext() ); + indexShard.waitForPrimaryTermAndGeneration(r.primaryTerm(), r.segmentGeneration(), termAndGenerationListener); } } }), TransportShardMultiGetFomTranslogAction.Response::new, getExecutor(request, shardId)) From d26a772f6e26cead6d1d130be1298c0e67943c51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Iv=C3=A1n=20Cea=20Fontenla?= Date: Fri, 22 Nov 2024 12:51:37 +0100 Subject: [PATCH 31/50] Unmute Categorize VerifierTests and require snapshot on them (#117016) Fixes https://github.com/elastic/elasticsearch/issues/116856 Fixes https://github.com/elastic/elasticsearch/issues/116857 Fixes https://github.com/elastic/elasticsearch/issues/116858 --- muted-tests.yml | 9 --------- .../elasticsearch/xpack/esql/analysis/VerifierTests.java | 6 ++++++ 2 files changed, 6 insertions(+), 9 
deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index b88bff86a0fbe..8be390e670c9b 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -208,15 +208,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/116777 - class: org.elasticsearch.xpack.searchablesnapshots.hdfs.SecureHdfsSearchableSnapshotsIT issue: https://github.com/elastic/elasticsearch/issues/116851 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeWithinAggregations - issue: https://github.com/elastic/elasticsearch/issues/116856 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeSingleGrouping - issue: https://github.com/elastic/elasticsearch/issues/116857 -- class: org.elasticsearch.xpack.esql.analysis.VerifierTests - method: testCategorizeNestedGrouping - issue: https://github.com/elastic/elasticsearch/issues/116858 - class: org.elasticsearch.search.basic.SearchWithRandomIOExceptionsIT method: testRandomDirectoryIOExceptions issue: https://github.com/elastic/elasticsearch/issues/114824 diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index 53c6e9a5fd841..7b2f85b80b3b6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -1732,6 +1732,8 @@ public void testIntervalAsString() { } public void testCategorizeSingleGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(first_name)"); query("from test | STATS COUNT(*) BY cat = CATEGORIZE(first_name)"); @@ -1759,6 +1761,8 @@ public void testCategorizeSingleGrouping() { } public void testCategorizeNestedGrouping() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS COUNT(*) BY CATEGORIZE(LENGTH(first_name)::string)"); assertEquals( @@ -1772,6 +1776,8 @@ public void testCategorizeNestedGrouping() { } public void testCategorizeWithinAggregations() { + assumeTrue("requires Categorize capability", EsqlCapabilities.Cap.CATEGORIZE.isEnabled()); + query("from test | STATS MV_COUNT(cat), COUNT(*) BY cat = CATEGORIZE(first_name)"); assertEquals( From 1fe3ed1e850c12d21806061e53687c0f1bd96738 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 22 Nov 2024 13:26:30 +0100 Subject: [PATCH 32/50] Add docs for aggs filtering (#116681) Add documentation for aggs filtering (the WHERE in STATS command). Fixes: #115083 --- .../esql/processing-commands/stats.asciidoc | 49 +++++++++++++++---- .../src/main/resources/stats.csv-spec | 36 ++++++++++++++ 2 files changed, 75 insertions(+), 10 deletions(-) diff --git a/docs/reference/esql/processing-commands/stats.asciidoc b/docs/reference/esql/processing-commands/stats.asciidoc index 0c479c1f62b76..3ed296fb6db24 100644 --- a/docs/reference/esql/processing-commands/stats.asciidoc +++ b/docs/reference/esql/processing-commands/stats.asciidoc @@ -1,16 +1,18 @@ [discrete] [[esql-stats-by]] -=== `STATS ... BY` +=== `STATS` -The `STATS ... BY` processing command groups rows according to a common value +The `STATS` processing command groups rows according to a common value and calculates one or more aggregated values over the grouped rows. 
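Before the formal syntax, a one-line illustration of the per-aggregate `WHERE` filtering this change documents — illustrative only; the tested, authoritative examples are the `docsStatsWithSimpleFiltering` and `docsStatsWithFilteringNoGroups` specs added further below:

    FROM employees
    | STATS avg_low  = AVG(salary) WHERE salary <  50000,
            avg_high = AVG(salary) WHERE salary >= 50000
      BY gender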
**Syntax** [source,esql] ---- -STATS [column1 =] expression1[, ..., [columnN =] expressionN] -[BY grouping_expression1[, ..., grouping_expressionN]] +STATS [column1 =] expression1 [WHERE boolean_expression1][, + ..., + [columnN =] expressionN [WHERE boolean_expressionN]] + [BY grouping_expression1[, ..., grouping_expressionN]] ---- *Parameters* @@ -28,14 +30,18 @@ An expression that computes an aggregated value. An expression that outputs the values to group by. If its name coincides with one of the computed columns, that column will be ignored. +`boolean_expressionX`:: +The condition that must be met for a row to be included in the evaluation of `expressionX`. + NOTE: Individual `null` values are skipped when computing aggregations. *Description* -The `STATS ... BY` processing command groups rows according to a common value -and calculate one or more aggregated values over the grouped rows. If `BY` is -omitted, the output table contains exactly one row with the aggregations applied -over the entire dataset. +The `STATS` processing command groups rows according to a common value +and calculates one or more aggregated values over the grouped rows. For the +calculation of each aggregated value, the rows in a group can be filtered with +`WHERE`. If `BY` is omitted, the output table contains exactly one row with +the aggregations applied over the entire dataset. The following <> are supported: @@ -90,6 +96,29 @@ include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues] include::{esql-specs}/stats.csv-spec[tag=statsCalcMultipleValues-result] |=== +To filter the rows that go into an aggregation, use the `WHERE` clause: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFiltering] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFiltering-result] +|=== + +The aggregations can be mixed, with and without a filter and grouping is +optional as well: + +[source.merge.styled,esql] +---- +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/stats.csv-spec[tag=aggFilteringNoGroup-result] +|=== + [[esql-stats-mv-group]] If the grouping key is multivalued then the input row is in all groups: @@ -109,7 +138,7 @@ It's also possible to group by multiple values: include::{esql-specs}/stats.csv-spec[tag=statsGroupByMultipleValues] ---- -If the all grouping keys are multivalued then the input row is in all groups: +If all the grouping keys are multivalued then the input row is in all groups: [source.merge.styled,esql] ---- @@ -121,7 +150,7 @@ include::{esql-specs}/stats.csv-spec[tag=multi-mv-group-result] |=== Both the aggregating functions and the grouping expressions accept other -functions. This is useful for using `STATS...BY` on multivalue columns. +functions. This is useful for using `STATS` on multivalue columns. 
For example, to calculate the average salary change, you can use `MV_AVG` to first average the multiple values per employee, and use the result with the `AVG` function: diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index cba5ace0dfe86..66c5362a24134 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -2348,6 +2348,42 @@ v:integer | job_positions:keyword 10094 | Accountant ; +docsStatsWithSimpleFiltering +required_capability: per_agg_filtering +// tag::aggFiltering[] +FROM employees +| STATS avg50s = AVG(salary)::LONG WHERE birth_date < "1960-01-01", + avg60s = AVG(salary)::LONG WHERE birth_date >= "1960-01-01" + BY gender +| SORT gender +// end::aggFiltering[] +| WHERE gender IS NOT NULL +; + +// tag::aggFiltering-result[] +avg50s:long |avg60s:long |gender:keyword +55462 |46637 |F +48279 |44879 |M +// end::aggFiltering-result[] +; + +docsStatsWithFilteringNoGroups +required_capability: per_agg_filtering +// tag::aggFilteringNoGroup[] +FROM employees +| EVAL Ks = salary / 1000 // thousands +| STATS under_40K = COUNT(*) WHERE Ks < 40, + inbetween = COUNT(*) WHERE 40 <= Ks AND Ks < 60, + over_60K = COUNT(*) WHERE 60 <= Ks, + total = COUNT(*) +// end::aggFilteringNoGroup[] +; + +// tag::aggFilteringNoGroup-result[] +under_40K:long |inbetween:long |over_60K:long |total:long +36 |39 |25 |100 +// end::aggFilteringNoGroup-result[] +; statsWithFiltering required_capability: per_agg_filtering From 7ac8d6392fb5bdac6330381a81ace8a17759b48b Mon Sep 17 00:00:00 2001 From: Jack Pan <35284546+jackpan123@users.noreply.github.com> Date: Fri, 22 Nov 2024 20:52:57 +0800 Subject: [PATCH 33/50] ESQL: Fix double lookup and HashJoinExec.addedFields (#115616) Fix a bug in HashJoinExec.addedFields that caused multiple LOOKUPs in a query to fail. --- docs/changelog/115616.yaml | 6 ++++++ .../xpack/esql/plan/physical/HashJoinExec.java | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/115616.yaml diff --git a/docs/changelog/115616.yaml b/docs/changelog/115616.yaml new file mode 100644 index 0000000000000..4fb4dc18538de --- /dev/null +++ b/docs/changelog/115616.yaml @@ -0,0 +1,6 @@ +pr: 115616 +summary: Fix double lookup failure on ESQL +area: ES|QL +type: bug +issues: + - 111398 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java index 4574c3720f8ee..5ae3702993fcb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/HashJoinExec.java @@ -91,7 +91,7 @@ public List rightFields() { public Set addedFields() { if (lazyAddedFields == null) { - lazyAddedFields = outputSet(); + lazyAddedFields = new AttributeSet(output()); lazyAddedFields.removeAll(left().output()); } return lazyAddedFields; From f9223531ac5fab80bd4c7204eee8df409906f6e3 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 22 Nov 2024 13:14:26 +0000 Subject: [PATCH 34/50] Enable test-fixture test suites (#117329) Today the `:test:fixtures` modules' test suites are disabled, but in fact these fixtures do have nontrivial behaviour that wants testing in its own right, so we should run their tests. 
This commit reinstates the disabled tests and fixes one which should have been fixed as part of #116212. --- test/fixtures/build.gradle | 9 --------- .../src/test/java/fixture/s3/S3HttpHandlerTests.java | 12 ++++++++++-- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/test/fixtures/build.gradle b/test/fixtures/build.gradle index 02d68517903a3..e69de29bb2d1d 100644 --- a/test/fixtures/build.gradle +++ b/test/fixtures/build.gradle @@ -1,9 +0,0 @@ - -subprojects { - // fixtures don't have tests, these are external projects used by the build - pluginManager.withPlugin('java') { - tasks.named('test').configure { - enabled = false - } - } -} diff --git a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java index 375f428f748e6..58f32292fa91c 100644 --- a/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java +++ b/test/fixtures/s3-fixture/src/test/java/fixture/s3/S3HttpHandlerTests.java @@ -31,6 +31,8 @@ import java.util.List; import java.util.Objects; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.greaterThan; public class S3HttpHandlerTests extends ESTestCase { @@ -261,7 +263,7 @@ public void testListAndAbortMultipartUpload() { path/blob10000false\ """), handleRequest(handler, "GET", "/bucket/?uploads&prefix=path/blob")); - assertEquals(RestStatus.NOT_FOUND, handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" + final var completeUploadResponse = handleRequest(handler, "POST", "/bucket/path/blob?uploadId=" + uploadId, Strings.format(""" @@ -272,7 +274,13 @@ public void testListAndAbortMultipartUpload() { %s 2 - """, part1Etag, part2Etag)).status()); + """, part1Etag, part2Etag)); + if (completeUploadResponse.status() == RestStatus.OK) { + // possible, but rare, indicating that S3 started processing the upload before returning an error + assertThat(completeUploadResponse.body().utf8ToString(), allOf(containsString(""), containsString("NoSuchUpload"))); + } else { + assertEquals(RestStatus.NOT_FOUND, completeUploadResponse.status()); + } } private static String getUploadId(BytesReference createUploadResponseBody) { From 09a53388cc7713d8d9fdca44bc50db7fd049ab45 Mon Sep 17 00:00:00 2001 From: Bogdan Pintea Date: Fri, 22 Nov 2024 14:48:47 +0100 Subject: [PATCH 35/50] ESQL: drop RowExec (#117133) Drop `RowExec` physical node: `Row` is now optimised away into a `LocalRelation`, which has its own physical mapping. `Row` is kept around as a container for the logical optimisations/folding of the expressions supported by the `ROW` command (which makes it in fact a source _plus_ `EVAL`), `LocalRelation` only being a container for the schema and end results (it doesn't actually go through transformations). 
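Conceptually, the new rule folds a `ROW` of literal expressions once, at optimization time, and replaces the node with a source that merely replays the precomputed values. A toy, self-contained sketch of that shape — illustrative only; the real `ReplaceRowAsLocalRelation` operates on the ES|QL `LogicalPlan` classes and handles arbitrary foldable expressions, none of which this toy models:

    import java.util.List;

    public class ReplaceRowSketch {
        interface Plan {}
        // ROW a = 1: named literal expressions, already folded to plain values here.
        record Row(List<String> names, List<Object> values) implements Plan {}
        // A source node that simply replays one precomputed row of results.
        record LocalRelation(List<String> schema, List<Object> row) implements Plan {}

        static Plan rule(Plan plan) {
            // Rewrite Row into LocalRelation; leave every other node untouched.
            return plan instanceof Row r ? new LocalRelation(r.names(), r.values()) : plan;
        }

        public static void main(String[] args) {
            System.out.println(rule(new Row(List.of("a"), List.<Object>of(1))));
        }
    }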
Fixes #104960 --- .../compute/operator/RowOperator.java | 47 ----------- .../compute/operator/RowOperatorTests.java | 81 ------------------- .../esql/optimizer/LogicalPlanOptimizer.java | 3 +- .../logical/ReplaceRowAsLocalRelation.java | 30 +++++++ .../xpack/esql/plan/PlanWritables.java | 2 - .../xpack/esql/plan/physical/RowExec.java | 75 ----------------- .../esql/planner/LocalExecutionPlanner.java | 11 --- .../esql/planner/mapper/MapperUtils.java | 19 ----- .../optimizer/LogicalPlanOptimizerTests.java | 12 +-- .../physical/RowExecSerializationTests.java | 51 ------------ 10 files changed, 38 insertions(+), 293 deletions(-) delete mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java delete mode 100644 x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java create mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java delete mode 100644 x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java delete mode 100644 x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java deleted file mode 100644 index 4b4379eb6a4d8..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.operator; - -import org.elasticsearch.compute.data.BlockFactory; - -import java.util.List; -import java.util.Objects; - -import static java.util.stream.Collectors.joining; - -public class RowOperator extends LocalSourceOperator { - - private final List objects; - - public record RowOperatorFactory(List objects) implements SourceOperatorFactory { - - @Override - public SourceOperator get(DriverContext driverContext) { - return new RowOperator(driverContext.blockFactory(), objects); - } - - @Override - public String describe() { - return "RowOperator[objects = " + objects.stream().map(Objects::toString).collect(joining(",")) + "]"; - } - } - - public RowOperator(BlockFactory blockFactory, List objects) { - super(blockFactory, () -> objects); - this.objects = objects; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(this.getClass().getSimpleName()).append("["); - sb.append("objects=").append(objects); - sb.append("]"); - return sb.toString(); - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java deleted file mode 100644 index cd8a49939fbb5..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.operator; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.TestBlockFactory; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.ESTestCase; - -import java.util.Arrays; -import java.util.List; - -import static org.hamcrest.Matchers.equalTo; - -public class RowOperatorTests extends ESTestCase { - final DriverContext driverContext = new DriverContext( - new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - TestBlockFactory.getNonBreakingInstance() - ); - - public void testBoolean() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(false)); - assertThat(factory.describe(), equalTo("RowOperator[objects = false]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[false]]")); - BooleanBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getBoolean(0), equalTo(false)); - } - - public void testInt() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(213)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 213]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[213]]")); - IntBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getInt(0), equalTo(213)); - } - - public void testLong() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(21321343214L)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 21321343214]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[21321343214]]")); - LongBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getLong(0), equalTo(21321343214L)); - } - - public void testDouble() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(2.0)); - assertThat(factory.describe(), equalTo("RowOperator[objects = 2.0]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[2.0]]")); - DoubleBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getDouble(0), equalTo(2.0)); - } - - public void testString() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(List.of(new BytesRef("cat"))); - assertThat(factory.describe(), equalTo("RowOperator[objects = [63 61 74]]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[[63 61 74]]]")); - BytesRefBlock block = factory.get(driverContext).getOutput().getBlock(0); - assertThat(block.getBytesRef(0, new BytesRef()), equalTo(new BytesRef("cat"))); - } - - public void testNull() { - RowOperator.RowOperatorFactory factory = new RowOperator.RowOperatorFactory(Arrays.asList(new Object[] { null })); - assertThat(factory.describe(), 
equalTo("RowOperator[objects = null]")); - assertThat(factory.get(driverContext).toString(), equalTo("RowOperator[objects=[null]]")); - Block block = factory.get(driverContext).getOutput().getBlock(0); - assertTrue(block.isNull(0)); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 5007b011092f0..a5f97cf961378 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceLimitAndSortAsTopN; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceOrderByExpressionWithEval; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRegexMatch; +import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceRowAsLocalRelation; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceStatsFilteredAggWithEval; import org.elasticsearch.xpack.esql.optimizer.rules.logical.ReplaceTrivialTypeConversions; import org.elasticsearch.xpack.esql.optimizer.rules.logical.SetAsOptimized; @@ -192,6 +193,6 @@ protected static Batch operators() { } protected static Batch cleanup() { - return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN()); + return new Batch<>("Clean Up", new ReplaceLimitAndSortAsTopN(), new ReplaceRowAsLocalRelation()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java new file mode 100644 index 0000000000000..eebeb1dc14f48 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRowAsLocalRelation.java @@ -0,0 +1,30 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.optimizer.rules.logical; + +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; +import org.elasticsearch.xpack.esql.plan.logical.Row; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; +import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; + +import java.util.ArrayList; +import java.util.List; + +public final class ReplaceRowAsLocalRelation extends OptimizerRules.OptimizerRule { + + @Override + protected LogicalPlan rule(Row row) { + var fields = row.fields(); + List values = new ArrayList<>(fields.size()); + fields.forEach(f -> values.add(f.child().fold())); + var blocks = BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values); + return new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks)); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java index 40b91beaee3eb..b3c273cbfa1bb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/PlanWritables.java @@ -45,7 +45,6 @@ import org.elasticsearch.xpack.esql.plan.physical.MvExpandExec; import org.elasticsearch.xpack.esql.plan.physical.OrderExec; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.SubqueryExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; @@ -106,7 +105,6 @@ public static List phsyical() { MvExpandExec.ENTRY, OrderExec.ENTRY, ProjectExec.ENTRY, - RowExec.ENTRY, ShowExec.ENTRY, SubqueryExec.ENTRY, TopNExec.ENTRY diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java deleted file mode 100644 index 3a104d4bc292b..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/RowExec.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.tree.NodeInfo; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class RowExec extends LeafExec { - public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(PhysicalPlan.class, "RowExec", RowExec::new); - - private final List fields; - - public RowExec(Source source, List fields) { - super(source); - this.fields = fields; - } - - private RowExec(StreamInput in) throws IOException { - this(Source.readFrom((PlanStreamInput) in), in.readCollectionAsList(Alias::new)); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeCollection(fields()); - } - - @Override - public String getWriteableName() { - return ENTRY.name; - } - - public List fields() { - return fields; - } - - @Override - public List output() { - return Expressions.asAttributes(fields); - } - - @Override - protected NodeInfo info() { - return NodeInfo.create(this, RowExec::new, fields); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RowExec constant = (RowExec) o; - return Objects.equals(fields, constant.fields); - } - - @Override - public int hashCode() { - return Objects.hash(fields); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index 1096c917fed4f..1ffc652e54337 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -31,7 +31,6 @@ import org.elasticsearch.compute.operator.Operator.OperatorFactory; import org.elasticsearch.compute.operator.OutputOperator.OutputOperatorFactory; import org.elasticsearch.compute.operator.RowInTableLookupOperator; -import org.elasticsearch.compute.operator.RowOperator.RowOperatorFactory; import org.elasticsearch.compute.operator.ShowOperator; import org.elasticsearch.compute.operator.SinkOperator; import org.elasticsearch.compute.operator.SinkOperator.SinkOperatorFactory; @@ -89,7 +88,6 @@ import org.elasticsearch.xpack.esql.plan.physical.OutputExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; -import org.elasticsearch.xpack.esql.plan.physical.RowExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; @@ -220,8 +218,6 @@ else if (node instanceof EsQueryExec esQuery) { return planEsQueryNode(esQuery, context); } else if (node instanceof EsStatsQueryExec statsQuery) { return planEsStats(statsQuery, context); - } else if (node instanceof RowExec row) { - 
return planRow(row, context); } else if (node instanceof LocalSourceExec localSource) { return planLocal(localSource, context); } else if (node instanceof ShowExec show) { @@ -620,13 +616,6 @@ private ExpressionEvaluator.Factory toEvaluator(Expression exp, Layout layout) { return EvalMapper.toEvaluator(exp, layout); } - private PhysicalOperation planRow(RowExec row, LocalExecutionPlannerContext context) { - List obj = row.fields().stream().map(f -> f.child().fold()).toList(); - Layout.Builder layout = new Layout.Builder(); - layout.append(row.output()); - return PhysicalOperation.fromSource(new RowOperatorFactory(obj), layout.build()); - } - private PhysicalOperation planLocal(LocalSourceExec localSourceExec, LocalExecutionPlannerContext context) { Layout.Builder layout = new Layout.Builder(); layout.append(localSourceExec.output()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java index ea21943aced9b..e881eabb38c43 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/mapper/MapperUtils.java @@ -9,10 +9,7 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -27,10 +24,8 @@ import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Project; -import org.elasticsearch.xpack.esql.plan.logical.Row; import org.elasticsearch.xpack.esql.plan.logical.UnaryPlan; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.logical.show.ShowInfo; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; @@ -45,9 +40,7 @@ import org.elasticsearch.xpack.esql.plan.physical.ProjectExec; import org.elasticsearch.xpack.esql.plan.physical.ShowExec; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; -import java.util.ArrayList; import java.util.List; /** @@ -57,18 +50,6 @@ class MapperUtils { private MapperUtils() {} static PhysicalPlan mapLeaf(LeafPlan p) { - if (p instanceof Row row) { - // return new RowExec(row.source(), row.fields()); - // convert row into local relation - List fields = row.fields(); - List values = new ArrayList<>(fields.size()); - for (Alias field : fields) { - values.add(field.child().fold()); - } - Block[] blocks = BlockUtils.fromListRow(PlannerUtils.NON_BREAKING_BLOCK_FACTORY, values); - p = new LocalRelation(row.source(), row.output(), LocalSupplier.of(blocks)); - } - if (p instanceof LocalRelation local) { return new LocalSourceExec(local.source(), local.output(), local.supplier()); } diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 96951ee15d48b..a11a9cef82989 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -2141,7 +2141,7 @@ public void testLimitThenSortBeforeMvExpand() { mvExpand = as(topN.child(), MvExpand.class); var limit = as(mvExpand.child(), Limit.class); assertThat(limit.limit().fold(), equalTo(7300)); - as(limit.child(), Row.class); + as(limit.child(), LocalRelation.class); } /** @@ -2286,7 +2286,7 @@ public void testSortMvExpand() { var expand = as(plan, MvExpand.class); assertThat(expand.limit(), equalTo(1000)); var topN = as(expand.child(), TopN.class); - var row = as(topN.child(), Row.class); + var row = as(topN.child(), LocalRelation.class); } /** @@ -2327,7 +2327,7 @@ public void testWhereMvExpand() { assertThat(expand.limit(), equalTo(1000)); var limit2 = as(expand.child(), Limit.class); assertThat(limit2.limit().fold(), is(1000)); - var row = as(limit2.child(), Row.class); + var row = as(limit2.child(), LocalRelation.class); } private static List orderNames(TopN topN) { @@ -3545,7 +3545,7 @@ public void testMvExpandFoldable() { var filterProp = ((GreaterThan) filter.condition()).left(); assertTrue(expand.expanded().semanticEquals(filterProp)); assertFalse(expand.target().semanticEquals(filterProp)); - var row = as(expand.child(), Row.class); + var row = as(expand.child(), LocalRelation.class); } /** @@ -3564,7 +3564,7 @@ public void testRenameStatsDropGroup() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); assertThat(Expressions.names(agg.groupings()), contains("a")); - var row = as(agg.child(), Row.class); + var row = as(agg.child(), LocalRelation.class); } /** @@ -3583,7 +3583,7 @@ public void testMultipleRenameStatsDropGroup() { var limit = as(plan, Limit.class); var agg = as(limit.child(), Aggregate.class); assertThat(Expressions.names(agg.groupings()), contains("a", "b")); - var row = as(agg.child(), Row.class); + var row = as(agg.child(), LocalRelation.class); } /** diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java deleted file mode 100644 index 3dd44cd20e369..0000000000000 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/RowExecSerializationTests.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.plan.physical; - -import org.elasticsearch.xpack.esql.core.expression.Alias; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.LiteralTests; -import org.elasticsearch.xpack.esql.core.expression.NameId; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.tree.SourceTests; - -import java.io.IOException; -import java.util.List; - -public class RowExecSerializationTests extends AbstractPhysicalPlanSerializationTests { - public static RowExec randomRowExec() { - Source source = randomSource(); - List fields = randomList(1, 10, RowExecSerializationTests::randomAlias); - return new RowExec(source, fields); - } - - private static Alias randomAlias() { - Source source = SourceTests.randomSource(); - String name = randomAlphaOfLength(5); - Expression child = LiteralTests.randomLiteral(); - boolean synthetic = randomBoolean(); - return new Alias(source, name, child, new NameId(), synthetic); - } - - @Override - protected RowExec createTestInstance() { - return randomRowExec(); - } - - @Override - protected RowExec mutateInstance(RowExec instance) throws IOException { - List fields = instance.fields(); - fields = randomValueOtherThan(fields, () -> randomList(1, 10, RowExecSerializationTests::randomAlias)); - return new RowExec(instance.source(), fields); - } - - @Override - protected boolean alwaysEmptySource() { - return true; - } -} From bd18787af5a21d3bc95b356e713f2014498d842f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mariusz=20J=C3=B3zala?= <377355+jozala@users.noreply.github.com> Date: Fri, 22 Nov 2024 14:55:25 +0100 Subject: [PATCH 36/50] Change default container image to be based on UBI minimal instead of Ubuntu (#116739) Previously default Docker image was based on Ubuntu. This changes the base image for default to be UBI minimal. 
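As a quick orientation before the diff, this is roughly how the docker image artifacts line up once the separate UBI flavour is folded into the default. This is a condensed, illustrative sketch: the suffixes and constant names match the packaging-test changes below, but the `fromPath` helper is hypothetical and the real enum also covers the TAR/ZIP/DEB/RPM packagings.

    import java.nio.file.Path;

    enum DockerPackagingSketch {
        DOCKER(".docker.tar"),              // the default image, now UBI-minimal based
        DOCKER_IRON_BANK(".ironbank.tar"),  // hardened UBI build for Iron Bank
        DOCKER_CLOUD_ESS(".cloud-ess.tar"), // extends the Wolfi image, adds all ES plugins
        DOCKER_WOLFI(".wolfi.tar");
        // ".ubi.tar" / DOCKER_UBI are gone: that flavour is now simply DOCKER.

        final String extension;

        DockerPackagingSketch(String extension) {
            this.extension = extension;
        }

        static DockerPackagingSketch fromPath(Path path) {
            String filename = path.getFileName().toString();
            for (DockerPackagingSketch packaging : values()) {
                if (filename.endsWith(packaging.extension)) {
                    return packaging;
                }
            }
            throw new IllegalArgumentException("not a docker image artifact: " + filename);
        }
    }

The net effect for consumers is that pulling the default image now yields a UBI-minimal, `microdnf`-based distribution rather than an Ubuntu, `apt-get`-based one.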
--- .../pull-request/packaging-tests-unix.yml | 67 ++--------------- .../gradle/internal/DockerBase.java | 4 +- distribution/docker/README.md | 12 +-- distribution/docker/build.gradle | 12 ++- distribution/docker/src/docker/Dockerfile | 73 ++----------------- .../ubi-docker-aarch64-export/build.gradle | 2 - .../docker/ubi-docker-export/build.gradle | 2 - .../packaging/test/DockerTests.java | 26 +++---- .../test/KeystoreManagementTests.java | 2 +- .../packaging/test/PackagingTestCase.java | 6 +- .../packaging/util/Distribution.java | 5 +- .../packaging/util/docker/DockerRun.java | 1 - settings.gradle | 2 - 13 files changed, 37 insertions(+), 177 deletions(-) delete mode 100644 distribution/docker/ubi-docker-aarch64-export/build.gradle delete mode 100644 distribution/docker/ubi-docker-export/build.gradle diff --git a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml index 8bec706bb758d..ffc1350aceab3 100644 --- a/.buildkite/pipelines/pull-request/packaging-tests-unix.yml +++ b/.buildkite/pipelines/pull-request/packaging-tests-unix.yml @@ -3,65 +3,9 @@ config: steps: - group: packaging-tests-unix steps: - - label: "{{matrix.image}} / docker / packaging-tests-unix" - key: "packaging-tests-unix-docker" - command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.docker-cloud-ess - timeout_in_minutes: 300 - matrix: - setup: - image: - - debian-11 - - debian-12 - - opensuse-leap-15 - - oraclelinux-7 - - oraclelinux-8 - - sles-12 - - sles-15 - - ubuntu-1804 - - ubuntu-2004 - - ubuntu-2204 - - rocky-8 - - rocky-9 - - rhel-7 - - rhel-8 - - rhel-9 - - almalinux-8 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - diskSizeGb: 350 - machineType: custom-16-32768 - - label: "{{matrix.image}} / packages / packaging-tests-unix" - key: "packaging-tests-unix-packages" - command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.packages - timeout_in_minutes: 300 - matrix: - setup: - image: - - debian-11 - - debian-12 - - opensuse-leap-15 - - oraclelinux-7 - - oraclelinux-8 - - sles-12 - - sles-15 - - ubuntu-1804 - - ubuntu-2004 - - ubuntu-2204 - - rocky-8 - - rocky-9 - - rhel-7 - - rhel-8 - - rhel-9 - - almalinux-8 - agents: - provider: gcp - image: family/elasticsearch-{{matrix.image}} - diskSizeGb: 350 - machineType: custom-16-32768 - - label: "{{matrix.image}} / archives / packaging-tests-unix" - key: "packaging-tests-unix-archives" - command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.archives + - label: "{{matrix.image}} / {{matrix.PACKAGING_TASK}} / packaging-tests-unix" + key: "packaging-tests-unix" + command: ./.ci/scripts/packaging-test.sh destructiveDistroTest.{{matrix.PACKAGING_TASK}} timeout_in_minutes: 300 matrix: setup: @@ -82,6 +26,11 @@ steps: - rhel-8 - rhel-9 - almalinux-8 + PACKAGING_TASK: + - docker + - docker-cloud-ess + - packages + - archives agents: provider: gcp image: family/elasticsearch-{{matrix.image}} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index 3e0a47a8f453c..bf901fef90450 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -13,10 +13,8 @@ * This class models the different Docker base images that are used to build Docker distributions of Elasticsearch. 
*/ public enum DockerBase { - DEFAULT("ubuntu:20.04", "", "apt-get"), - // "latest" here is intentional, since the image name specifies "8" - UBI("docker.elastic.co/ubi8/ubi-minimal:latest", "-ubi", "microdnf"), + DEFAULT("docker.elastic.co/ubi8/ubi-minimal:latest", "", "microdnf"), // The Iron Bank base image is UBI (albeit hardened), but we are required to parameterize the Docker build IRON_BANK("${BASE_REGISTRY}/${BASE_IMAGE}:${BASE_TAG}", "-ironbank", "yum"), diff --git a/distribution/docker/README.md b/distribution/docker/README.md index 49facab461edc..9438b4f1e82c3 100644 --- a/distribution/docker/README.md +++ b/distribution/docker/README.md @@ -3,8 +3,7 @@ The ES build can generate several types of Docker image. These are enumerated in the [DockerBase] enum. - * Default - this is what most people use, and is based on Ubuntu - * UBI - the same as the default image, but based upon [RedHat's UBI + * Default - this is what most people use, and is based on [RedHat's UBI images][ubi], specifically their minimal flavour. * Wolfi - the same as the default image, but based upon [Wolfi](https://github.com/wolfi-dev) * Cloud ESS - this directly extends the Wolfi image, and adds all ES plugins @@ -23,14 +22,7 @@ the [DockerBase] enum. software (FOSS) and Commercial off-the-shelf (COTS). In practice, this is another UBI build, this time on the regular UBI image, with extra hardening. See below for more details. - * Cloud - this is mostly the same as the default image, with some notable differences: - * `filebeat` and `metricbeat` are included - * `wget` is included - * The `ENTRYPOINT` is just `/bin/tini`, and the `CMD` is - `/app/elasticsearch.sh`. In normal use this file would be bind-mounted - in, but the image ships a stub version of this file so that the image - can still be tested. -The long-term goal is for both Cloud images to be retired in favour of the +The long-term goal is for Cloud ESS image to be retired in favour of the default image. diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index d73f9c395f15c..f5b94fb9dfd94 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -527,9 +527,7 @@ subprojects { Project subProject -> final Architecture architecture = subProject.name.contains('aarch64-') ? Architecture.AARCH64 : Architecture.X64 DockerBase base = DockerBase.DEFAULT - if (subProject.name.contains('ubi-')) { - base = DockerBase.UBI - } else if (subProject.name.contains('ironbank-')) { + if (subProject.name.contains('ironbank-')) { base = DockerBase.IRON_BANK } else if (subProject.name.contains('cloud-ess-')) { base = DockerBase.CLOUD_ESS @@ -538,11 +536,11 @@ subprojects { Project subProject -> } final String arch = architecture == Architecture.AARCH64 ? '-aarch64' : '' - final String extension = base == DockerBase.UBI ? 'ubi.tar' : + final String extension = (base == DockerBase.IRON_BANK ? 'ironbank.tar' : - (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' : - (base == DockerBase.WOLFI ? 'wolfi.tar' : - 'docker.tar'))) + (base == DockerBase.CLOUD_ESS ? 'cloud-ess.tar' : + (base == DockerBase.WOLFI ? 
'wolfi.tar' : + 'docker.tar'))) final String artifactName = "elasticsearch${arch}${base.suffix}_test" final String exportTaskName = taskName("export", architecture, base, 'DockerImage') diff --git a/distribution/docker/src/docker/Dockerfile b/distribution/docker/src/docker/Dockerfile index fd2516f2fdc9a..6cb030565d9d2 100644 --- a/distribution/docker/src/docker/Dockerfile +++ b/distribution/docker/src/docker/Dockerfile @@ -41,9 +41,7 @@ RUN chmod 0555 /bin/tini <% } else { %> # Install required packages to extract the Elasticsearch distribution -<% if (docker_base == 'default' || docker_base == 'cloud') { %> -RUN <%= retry.loop(package_manager, "${package_manager} update && DEBIAN_FRONTEND=noninteractive ${package_manager} install -y curl ") %> -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> RUN <%= retry.loop(package_manager, "export DEBIAN_FRONTEND=noninteractive && ${package_manager} update && ${package_manager} update && ${package_manager} add --no-cache curl") %> <% } else { %> RUN <%= retry.loop(package_manager, "${package_manager} install -y findutils tar gzip") %> @@ -117,27 +115,6 @@ RUN sed -i -e 's/ES_DISTRIBUTION_TYPE=tar/ES_DISTRIBUTION_TYPE=docker/' bin/elas chmod 0775 bin config config/jvm.options.d data logs plugins && \\ find config -type f -exec chmod 0664 {} + -<% if (docker_base == "cloud") { %> -COPY filebeat-${version}.tar.gz metricbeat-${version}.tar.gz /tmp/ -RUN set -eux ; \\ - for beat in filebeat metricbeat ; do \\ - if [ ! -s /tmp/\$beat-${version}.tar.gz ]; then \\ - echo "/tmp/\$beat-${version}.tar.gz is empty - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - if ! tar tf /tmp/\$beat-${version}.tar.gz >/dev/null; then \\ - echo "/tmp/\$beat-${version}.tar.gz is corrupt - cannot uncompress" 2>&1 ; \\ - exit 1 ; \\ - fi ; \\ - mkdir -p /opt/\$beat ; \\ - tar xf /tmp/\$beat-${version}.tar.gz -C /opt/\$beat --strip-components=1 ; \\ - done - -# Add plugins infrastructure -RUN mkdir -p /opt/plugins/archive -RUN chmod -R 0555 /opt/plugins -<% } %> - ################################################################################ # Build stage 2 (the actual Elasticsearch image): # @@ -173,21 +150,6 @@ SHELL ["/bin/bash", "-c"] # Optionally set Bash as the default shell in the container at runtime CMD ["/bin/bash"] -<% } else if (docker_base == "default" || docker_base == "cloud") { %> - -# Change default shell to bash, then install required packages with retries. -RUN yes no | dpkg-reconfigure dash && \\ - <%= retry.loop( - package_manager, - "export DEBIAN_FRONTEND=noninteractive && \n" + - " ${package_manager} update && \n" + - " ${package_manager} upgrade -y && \n" + - " ${package_manager} install -y --no-install-recommends \n" + - " ca-certificates curl netcat p11-kit unzip zip ${docker_base == 'cloud' ? 
'wget' : '' } && \n" + - " ${package_manager} clean && \n" + - " rm -rf /var/lib/apt/lists/*" - ) %> - <% } else { %> RUN <%= retry.loop( @@ -201,12 +163,7 @@ RUN <%= retry.loop( <% } %> -<% if (docker_base == "default" || docker_base == "cloud") { %> -RUN groupadd -g 1000 elasticsearch && \\ - adduser --uid 1000 --gid 1000 --home /usr/share/elasticsearch elasticsearch && \\ - adduser elasticsearch root && \\ - chown -R 0:0 /usr/share/elasticsearch -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> RUN groupadd -g 1000 elasticsearch && \ adduser -G elasticsearch -u 1000 elasticsearch -D --home /usr/share/elasticsearch elasticsearch && \ adduser elasticsearch root && \ @@ -226,10 +183,6 @@ COPY --from=builder --chown=0:0 /usr/share/elasticsearch /usr/share/elasticsearc COPY --from=builder --chown=0:0 /bin/tini /bin/tini <% } %> -<% if (docker_base == 'cloud') { %> -COPY --from=builder --chown=0:0 /opt /opt -<% } %> - ENV PATH /usr/share/elasticsearch/bin:\$PATH ENV SHELL /bin/bash COPY ${bin_dir}/docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh @@ -251,12 +204,7 @@ RUN chmod g=u /etc/passwd && \\ chmod 0775 /usr/share/elasticsearch && \\ chown elasticsearch bin config config/jvm.options.d data logs plugins -<% if (docker_base == 'default' || docker_base == 'cloud') { %> -# Update "cacerts" bundle to use Ubuntu's CA certificates (and make sure it -# stays up-to-date with changes to Ubuntu's store) -COPY bin/docker-openjdk /etc/ca-certificates/update.d/docker-openjdk -RUN /etc/ca-certificates/update.d/docker-openjdk -<% } else if (docker_base == 'wolfi') { %> +<% if (docker_base == 'wolfi') { %> RUN ln -sf /etc/ssl/certs/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts <% } else { %> RUN ln -sf /etc/pki/ca-trust/extracted/java/cacerts /usr/share/elasticsearch/jdk/lib/security/cacerts @@ -284,9 +232,7 @@ LABEL org.label-schema.build-date="${build_date}" \\ org.opencontainers.image.url="https://www.elastic.co/products/elasticsearch" \\ org.opencontainers.image.vendor="Elastic" \\ org.opencontainers.image.version="${version}" -<% } %> -<% if (docker_base == 'ubi') { %> LABEL name="Elasticsearch" \\ maintainer="infra@elastic.co" \\ vendor="Elastic" \\ @@ -296,21 +242,12 @@ LABEL name="Elasticsearch" \\ description="You know, for search." <% } %> -<% if (docker_base == 'ubi') { %> -RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE -<% } else if (docker_base == 'iron_bank') { %> RUN mkdir /licenses && cp LICENSE.txt /licenses/LICENSE +<% if (docker_base == 'iron_bank') { %> COPY LICENSE /licenses/LICENSE.addendum <% } %> -<% if (docker_base == "cloud") { %> -ENTRYPOINT ["/bin/tini", "--"] -CMD ["/app/elasticsearch.sh"] -# Generate a stub command that will be overwritten at runtime -RUN mkdir /app && \\ - echo -e '#!/bin/bash\\nexec /usr/local/bin/docker-entrypoint.sh eswrapper' > /app/elasticsearch.sh && \\ - chmod 0555 /app/elasticsearch.sh -<% } else if (docker_base == "wolfi") { %> +<% if (docker_base == "wolfi") { %> # Our actual entrypoint is `tini`, a minimal but functional init program. It # calls the entrypoint we provide, while correctly forwarding signals. 
ENTRYPOINT ["/sbin/tini", "--", "/usr/local/bin/docker-entrypoint.sh"] diff --git a/distribution/docker/ubi-docker-aarch64-export/build.gradle b/distribution/docker/ubi-docker-aarch64-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/ubi-docker-aarch64-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/distribution/docker/ubi-docker-export/build.gradle b/distribution/docker/ubi-docker-export/build.gradle deleted file mode 100644 index 537b5a093683e..0000000000000 --- a/distribution/docker/ubi-docker-export/build.gradle +++ /dev/null @@ -1,2 +0,0 @@ -// This file is intentionally blank. All configuration of the -// export is done in the parent project. diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java index 8cb8354eb5d71..3ad4c247a8b9b 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/DockerTests.java @@ -96,11 +96,10 @@ /** * This class tests the Elasticsearch Docker images. We have several: *
<ul>
- * <li>The default image with a custom, small base image</li>
- * <li>A UBI-based image</li>
+ * <li>The default image, which is UBI-based</li>
 * <li>Another UBI image for Iron Bank</li>
 * <li>A WOLFI-based image</li>
- * <li>Images for Cloud</li>
+ * <li>Image for Cloud</li>
 * </ul>
*/ @ThreadLeakFilters(defaultFilters = true, filters = { HttpClientThreadsFilter.class }) @@ -383,15 +382,14 @@ public void test026InstallBundledRepositoryPluginsViaConfigFile() { public void test040JavaUsesTheOsProvidedKeystore() { final String path = sh.run("realpath jdk/lib/security/cacerts").stdout(); - if (distribution.packaging == Packaging.DOCKER_UBI || distribution.packaging == Packaging.DOCKER_IRON_BANK) { + if (distribution.packaging == Packaging.DOCKER || distribution.packaging == Packaging.DOCKER_IRON_BANK) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/pki/ca-trust/extracted/java/cacerts")); } else if (distribution.packaging == Packaging.DOCKER_WOLFI || distribution.packaging == Packaging.DOCKER_CLOUD_ESS) { // In these images, the `cacerts` file ought to be a symlink here assertThat(path, equalTo("/etc/ssl/certs/java/cacerts")); } else { - // Whereas on other images, it's a real file so the real path is the same - assertThat(path, equalTo("/usr/share/elasticsearch/jdk/lib/security/cacerts")); + fail("Unknown distribution: " + distribution.packaging); } } @@ -1126,25 +1124,25 @@ public void test171AdditionalCliOptionsAreForwarded() throws Exception { } /** - * Check that the UBI images has the correct license information in the correct place. + * Check that the Docker images have the correct license information in the correct place. */ - public void test200UbiImagesHaveLicenseDirectory() { - assumeTrue(distribution.packaging == Packaging.DOCKER_UBI); + public void test200ImagesHaveLicenseDirectory() { + assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final String[] files = sh.run("find /licenses -type f").stdout().split("\n"); assertThat(files, arrayContaining("/licenses/LICENSE")); // UBI image doesn't contain `diff` - final String ubiLicense = sh.run("cat /licenses/LICENSE").stdout(); + final String imageLicense = sh.run("cat /licenses/LICENSE").stdout(); final String distroLicense = sh.run("cat /usr/share/elasticsearch/LICENSE.txt").stdout(); - assertThat(ubiLicense, equalTo(distroLicense)); + assertThat(imageLicense, equalTo(distroLicense)); } /** - * Check that the UBI image has the expected labels + * Check that the images has the expected labels */ - public void test210UbiLabels() throws Exception { - assumeTrue(distribution.packaging == Packaging.DOCKER_UBI); + public void test210Labels() throws Exception { + assumeTrue(distribution.packaging != Packaging.DOCKER_IRON_BANK); final Map labels = getImageLabels(distribution); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java index 02e1ce35764cf..a47dd0e57642e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/KeystoreManagementTests.java @@ -436,7 +436,7 @@ private void verifyKeystorePermissions() { switch (distribution.packaging) { case TAR, ZIP -> assertThat(keystore, file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); case DEB, RPM -> assertThat(keystore, file(File, "root", "elasticsearch", p660)); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(keystore, DockerFileMatcher.file(p660)); + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> assertThat(keystore, DockerFileMatcher.file(p660)); default -> throw new IllegalStateException("Unknown 
Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java index b4a00ca56924a..a157cc84e624e 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/test/PackagingTestCase.java @@ -245,7 +245,7 @@ protected static void install() throws Exception { installation = Packages.installPackage(sh, distribution); Packages.verifyPackageInstallation(installation, distribution, sh); } - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> { installation = Docker.runContainer(distribution); Docker.verifyContainerInstallation(installation); } @@ -333,7 +333,6 @@ public Shell.Result runElasticsearchStartCommand(String password, boolean daemon case RPM: return Packages.runElasticsearchStartCommand(sh); case DOCKER: - case DOCKER_UBI: case DOCKER_IRON_BANK: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: @@ -355,7 +354,6 @@ public void stopElasticsearch() throws Exception { Packages.stopElasticsearch(sh); break; case DOCKER: - case DOCKER_UBI: case DOCKER_IRON_BANK: case DOCKER_CLOUD_ESS: case DOCKER_WOLFI: @@ -371,7 +369,7 @@ public void awaitElasticsearchStartup(Shell.Result result) throws Exception { switch (distribution.packaging) { case TAR, ZIP -> Archives.assertElasticsearchStarted(installation); case DEB, RPM -> Packages.assertElasticsearchStarted(sh, installation); - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> Docker.waitForElasticsearchToStart(); default -> throw new IllegalStateException("Unknown Elasticsearch packaging type."); } } diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java index 11b8324384631..55c59db6219d3 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/Distribution.java @@ -29,8 +29,6 @@ public Distribution(Path path) { this.packaging = Packaging.TAR; } else if (filename.endsWith(".docker.tar")) { this.packaging = Packaging.DOCKER; - } else if (filename.endsWith(".ubi.tar")) { - this.packaging = Packaging.DOCKER_UBI; } else if (filename.endsWith(".ironbank.tar")) { this.packaging = Packaging.DOCKER_IRON_BANK; } else if (filename.endsWith(".cloud-ess.tar")) { @@ -61,7 +59,7 @@ public boolean isPackage() { */ public boolean isDocker() { return switch (packaging) { - case DOCKER, DOCKER_UBI, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; + case DOCKER, DOCKER_IRON_BANK, DOCKER_CLOUD_ESS, DOCKER_WOLFI -> true; default -> false; }; } @@ -73,7 +71,6 @@ public enum Packaging { DEB(".deb", Platforms.isDPKG()), RPM(".rpm", Platforms.isRPM()), DOCKER(".docker.tar", Platforms.isDocker()), - DOCKER_UBI(".ubi.tar", Platforms.isDocker()), DOCKER_IRON_BANK(".ironbank.tar", Platforms.isDocker()), DOCKER_CLOUD_ESS(".cloud-ess.tar", Platforms.isDocker()), DOCKER_WOLFI(".wolfi.tar", Platforms.isDocker()); diff --git a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java index 
e3eac23d3ecce..5dc47993072a8 100644 --- a/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java +++ b/qa/packaging/src/test/java/org/elasticsearch/packaging/util/docker/DockerRun.java @@ -163,7 +163,6 @@ String build() { public static String getImageName(Distribution distribution) { String suffix = switch (distribution.packaging) { case DOCKER -> ""; - case DOCKER_UBI -> "-ubi"; case DOCKER_IRON_BANK -> "-ironbank"; case DOCKER_CLOUD_ESS -> "-cloud-ess"; case DOCKER_WOLFI -> "-wolfi"; diff --git a/settings.gradle b/settings.gradle index d04d45bffc3ad..333f8272447c2 100644 --- a/settings.gradle +++ b/settings.gradle @@ -66,8 +66,6 @@ List projects = [ 'distribution:docker:docker-export', 'distribution:docker:ironbank-docker-aarch64-export', 'distribution:docker:ironbank-docker-export', - 'distribution:docker:ubi-docker-aarch64-export', - 'distribution:docker:ubi-docker-export', 'distribution:docker:wolfi-docker-aarch64-export', 'distribution:docker:wolfi-docker-export', 'distribution:packages:aarch64-deb', From 830c5048bae2a39242391c930a031ff00d0cce5d Mon Sep 17 00:00:00 2001 From: Mike Pellegrini Date: Fri, 22 Nov 2024 08:57:00 -0500 Subject: [PATCH 37/50] Always Emit Inference ID in Semantic Text Mapping (#117294) --- docs/changelog/117294.yaml | 5 ++++ .../xpack/inference/InferenceFeatures.java | 3 ++- .../mapper/SemanticTextFieldMapper.java | 5 +++- .../mapper/SemanticTextFieldMapperTests.java | 16 ++++++++++-- .../10_semantic_text_field_mapping.yml | 26 +++++++++++++++++++ 5 files changed, 51 insertions(+), 4 deletions(-) create mode 100644 docs/changelog/117294.yaml diff --git a/docs/changelog/117294.yaml b/docs/changelog/117294.yaml new file mode 100644 index 0000000000000..f6e80690de7ff --- /dev/null +++ b/docs/changelog/117294.yaml @@ -0,0 +1,5 @@ +pr: 117294 +summary: Always Emit Inference ID in Semantic Text Mapping +area: Mapping +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java index f70e7f367127d..c82f287792a7c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferenceFeatures.java @@ -39,7 +39,8 @@ public Set getTestFeatures() { SemanticTextFieldMapper.SEMANTIC_TEXT_IN_OBJECT_FIELD_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX, SemanticTextFieldMapper.SEMANTIC_TEXT_DELETE_FIX, - SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX + SemanticTextFieldMapper.SEMANTIC_TEXT_ZERO_SIZE_FIX, + SemanticTextFieldMapper.SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX ); } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 89a54ffe29177..3744bf2a6dbed 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -93,6 +93,9 @@ public class SemanticTextFieldMapper extends FieldMapper implements InferenceFie public static final NodeFeature SEMANTIC_TEXT_SINGLE_FIELD_UPDATE_FIX = new NodeFeature("semantic_text.single_field_update_fix"); public static final NodeFeature 
SEMANTIC_TEXT_DELETE_FIX = new NodeFeature("semantic_text.delete_fix"); public static final NodeFeature SEMANTIC_TEXT_ZERO_SIZE_FIX = new NodeFeature("semantic_text.zero_size_fix"); + public static final NodeFeature SEMANTIC_TEXT_ALWAYS_EMIT_INFERENCE_ID_FIX = new NodeFeature( + "semantic_text.always_emit_inference_id_fix" + ); public static final String CONTENT_TYPE = "semantic_text"; public static final String DEFAULT_ELSER_2_INFERENCE_ID = DEFAULT_ELSER_ID; @@ -119,7 +122,7 @@ public static class Builder extends FieldMapper.Builder { "[" + INFERENCE_ID_FIELD + "] on mapper [" + leafName() + "] of type [" + CONTENT_TYPE + "] must not be empty" ); } - }); + }).alwaysSerialize(); private final Parameter searchInferenceId = Parameter.stringParam( SEARCH_INFERENCE_ID_FIELD, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 6e58226f85f28..71ff9fc7d84cf 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -109,6 +109,12 @@ protected String minimalIsInvalidRoutingPathErrorMessage(Mapper mapper) { return "cannot have nested fields when index is in [index.mode=time_series]"; } + @Override + protected void metaMapping(XContentBuilder b) throws IOException { + super.metaMapping(b); + b.field(INFERENCE_ID_FIELD, DEFAULT_ELSER_2_INFERENCE_ID); + } + @Override protected Object getSampleValueForDocument() { return null; @@ -166,10 +172,11 @@ protected void assertSearchable(MappedFieldType fieldType) { public void testDefaults() throws Exception { final String fieldName = "field"; final XContentBuilder fieldMapping = fieldMapping(this::minimalMapping); + final XContentBuilder expectedMapping = fieldMapping(this::metaMapping); MapperService mapperService = createMapperService(fieldMapping); DocumentMapper mapper = mapperService.documentMapper(); - assertEquals(Strings.toString(fieldMapping), mapper.mappingSource().toString()); + assertEquals(Strings.toString(expectedMapping), mapper.mappingSource().toString()); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, DEFAULT_ELSER_2_INFERENCE_ID); @@ -208,10 +215,15 @@ public void testSetInferenceEndpoints() throws IOException { final XContentBuilder fieldMapping = fieldMapping( b -> b.field("type", "semantic_text").field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) ); + final XContentBuilder expectedMapping = fieldMapping( + b -> b.field("type", "semantic_text") + .field(INFERENCE_ID_FIELD, DEFAULT_ELSER_2_INFERENCE_ID) + .field(SEARCH_INFERENCE_ID_FIELD, searchInferenceId) + ); final MapperService mapperService = createMapperService(fieldMapping); assertSemanticTextField(mapperService, fieldName, false); assertInferenceEndpoints(mapperService, fieldName, DEFAULT_ELSER_2_INFERENCE_ID, searchInferenceId); - assertSerialization.accept(fieldMapping, mapperService); + assertSerialization.accept(expectedMapping, mapperService); } { final XContentBuilder fieldMapping = fieldMapping( diff --git a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml 
b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml index 71fb1fd95989f..882f1df03e926 100644 --- a/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml +++ b/x-pack/plugin/inference/src/yamlRestTest/resources/rest-api-spec/test/inference/10_semantic_text_field_mapping.yml @@ -371,3 +371,29 @@ setup: - match: { error.type: illegal_argument_exception } - match: { error.reason: "semantic_text field [level_1.level_2.sparse_field] cannot be in an object field with subobjects disabled" } + +--- +"Mapping always includes inference ID": + - requires: + cluster_features: "semantic_text.always_emit_inference_id_fix" + reason: always emit inference ID fix added in 8.17.0 + test_runner_features: [capabilities] + capabilities: + - method: GET + path: /_inference + capabilities: [default_elser_2] + + - do: + indices.create: + index: test-always-include-inference-id-index + body: + mappings: + properties: + semantic_field: + type: semantic_text + + - do: + indices.get_mapping: + index: test-always-include-inference-id-index + + - exists: test-always-include-inference-id-index.mappings.properties.semantic_field.inference_id From 13a51f2d425347abbf338c0abf776ff95f2db91b Mon Sep 17 00:00:00 2001 From: Nikolaj Volgushev Date: Fri, 22 Nov 2024 15:19:53 +0100 Subject: [PATCH 38/50] Distinguish `LicensedFeature` by family field (#116809) This PR fixes unintentional licensed feature overlaps for features with the same name but different family fields. --- docs/changelog/116809.yaml | 5 +++ .../license/LicensedFeature.java | 4 +- .../license/XPackLicenseStateTests.java | 45 +++++++++++++++++++ 3 files changed, 52 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/116809.yaml diff --git a/docs/changelog/116809.yaml b/docs/changelog/116809.yaml new file mode 100644 index 0000000000000..61dbeb233d576 --- /dev/null +++ b/docs/changelog/116809.yaml @@ -0,0 +1,5 @@ +pr: 116809 +summary: "Distinguish `LicensedFeature` by family field" +area: License +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java index 56c8e87d1c502..d86c15aa14bc9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicensedFeature.java @@ -136,11 +136,11 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; LicensedFeature that = (LicensedFeature) o; - return Objects.equals(name, that.name); + return Objects.equals(name, that.name) && Objects.equals(family, that.family); } @Override public int hashCode() { - return Objects.hash(name); + return Objects.hash(name, family); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index 04fe20901749b..e889d25cd7a96 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -14,6 +14,7 @@ import java.util.Arrays; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Predicate; import java.util.stream.Collectors; @@ 
-228,6 +229,50 @@ public void testLastUsedMomentaryFeature() { assertThat(lastUsed.get(usage), equalTo(200L)); } + public void testLastUsedMomentaryFeatureWithSameNameDifferentFamily() { + LicensedFeature.Momentary featureFamilyA = LicensedFeature.momentary("familyA", "goldFeature", GOLD); + LicensedFeature.Momentary featureFamilyB = LicensedFeature.momentary("familyB", "goldFeature", GOLD); + + AtomicInteger currentTime = new AtomicInteger(100); // non zero start time + XPackLicenseState licenseState = new XPackLicenseState(currentTime::get); + + featureFamilyA.check(licenseState); + featureFamilyB.check(licenseState); + + Map lastUsed = licenseState.getLastUsed(); + assertThat("feature.check tracks usage separately by family", lastUsed, aMapWithSize(2)); + Set actualFeatures = lastUsed.entrySet() + .stream() + .map(it -> new FeatureInfoWithTimestamp(it.getKey().feature().getFamily(), it.getKey().feature().getName(), it.getValue())) + .collect(Collectors.toSet()); + assertThat( + actualFeatures, + containsInAnyOrder( + new FeatureInfoWithTimestamp("familyA", "goldFeature", 100L), + new FeatureInfoWithTimestamp("familyB", "goldFeature", 100L) + ) + ); + + currentTime.set(200); + featureFamilyB.check(licenseState); + + lastUsed = licenseState.getLastUsed(); + assertThat("feature.check tracks usage separately by family", lastUsed, aMapWithSize(2)); + actualFeatures = lastUsed.entrySet() + .stream() + .map(it -> new FeatureInfoWithTimestamp(it.getKey().feature().getFamily(), it.getKey().feature().getName(), it.getValue())) + .collect(Collectors.toSet()); + assertThat( + actualFeatures, + containsInAnyOrder( + new FeatureInfoWithTimestamp("familyA", "goldFeature", 100L), + new FeatureInfoWithTimestamp("familyB", "goldFeature", 200L) + ) + ); + } + + private record FeatureInfoWithTimestamp(String family, String featureName, Long timestamp) {} + public void testLastUsedPersistentFeature() { LicensedFeature.Persistent goldFeature = LicensedFeature.persistent("family", "goldFeature", GOLD); AtomicInteger currentTime = new AtomicInteger(100); // non zero start time From 29be961fd14ce35ad678f206781f0b463f6d0c7d Mon Sep 17 00:00:00 2001 From: Dimitris Rempapis Date: Fri, 22 Nov 2024 16:20:32 +0200 Subject: [PATCH 39/50] Search Queries in parallel - part 1 (#116812) Runs some spots in parallel to increase test execution performance and get some coverage on parallel query execution --- .../template/SimpleIndexTemplateIT.java | 24 +-- .../aggregations/bucket/RandomSamplerIT.java | 46 ++-- .../search/sort/FieldSortIT.java | 197 +++++------------- .../search/sort/GeoDistanceSortBuilderIT.java | 46 +--- .../search/source/MetadataFetchingIT.java | 15 +- .../search/source/SourceFetchingIT.java | 40 ++-- 6 files changed, 118 insertions(+), 250 deletions(-) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java index 0647a24aa39c8..de9e3f28a2109 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/template/SimpleIndexTemplateIT.java @@ -50,6 +50,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertRequestBuilderThrows; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static 
org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -843,24 +844,13 @@ public void testMultipleTemplate() throws IOException { ensureGreen(); - // ax -> matches template - assertResponse( + assertResponses(response -> { + assertHitCount(response, 1); + assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); + assertNull(response.getHits().getAt(0).field("field2")); + }, prepareSearch("ax").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2"), - response -> { - assertHitCount(response, 1); - assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); - assertNull(response.getHits().getAt(0).field("field2")); - } - ); - - // bx -> matches template - assertResponse( - prepareSearch("bx").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2"), - response -> { - assertHitCount(response, 1); - assertEquals("value1", response.getHits().getAt(0).field("field1").getValue().toString()); - assertNull(response.getHits().getAt(0).field("field2")); - } + prepareSearch("bx").setQuery(termQuery("field1", "value1")).addStoredField("field1").addStoredField("field2") ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java index 544f0a08eaa6c..0aa28b9f9dbe8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RandomSamplerIT.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.aggregations.bucket; import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.sampler.random.InternalRandomSampler; import org.elasticsearch.search.aggregations.bucket.sampler.random.RandomSamplerAggregationBuilder; @@ -20,11 +21,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.stream.IntStream; import static org.elasticsearch.search.aggregations.AggregationBuilders.avg; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.lessThan; @@ -112,27 +115,28 @@ public void testRandomSamplerConsistentSeed() { } ); - for (int i = 0; i < NUM_SAMPLE_RUNS; i++) { - assertResponse( - prepareSearch("idx").setPreference("shard:0") - .addAggregation( - new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) - .setSeed(0) - .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) - .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) - .setShardSeed(42) - ), - response -> { - InternalRandomSampler sampler = response.getAggregations().get("sampler"); - double monotonicValue = ((Avg) 
sampler.getAggregations().get("mean_monotonic")).getValue(); - double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); - long docCount = sampler.getDocCount(); - assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance); - assertEquals(numericValue, sampleNumericValue[0], tolerance); - assertEquals(docCount, sampledDocCount[0]); - } - ); - } + assertResponses(response -> { + InternalRandomSampler sampler = response.getAggregations().get("sampler"); + double monotonicValue = ((Avg) sampler.getAggregations().get("mean_monotonic")).getValue(); + double numericValue = ((Avg) sampler.getAggregations().get("mean_numeric")).getValue(); + long docCount = sampler.getDocCount(); + assertEquals(monotonicValue, sampleMonotonicValue[0], tolerance); + assertEquals(numericValue, sampleNumericValue[0], tolerance); + assertEquals(docCount, sampledDocCount[0]); + }, + IntStream.rangeClosed(0, NUM_SAMPLE_RUNS - 1) + .mapToObj( + num -> prepareSearch("idx").setPreference("shard:0") + .addAggregation( + new RandomSamplerAggregationBuilder("sampler").setProbability(PROBABILITY) + .setSeed(0) + .subAggregation(avg("mean_monotonic").field(MONOTONIC_VALUE)) + .subAggregation(avg("mean_numeric").field(NUMERIC_VALUE)) + .setShardSeed(42) + ) + ) + .toArray(SearchRequestBuilder[]::new) + ); } public void testRandomSampler() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index d1841ebaf8071..87665c3d784f1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -54,6 +54,7 @@ import java.util.Set; import java.util.TreeMap; import java.util.concurrent.ExecutionException; +import java.util.function.Consumer; import java.util.function.Function; import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery; @@ -66,6 +67,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailuresAndResponse; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSecondHit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -323,6 +325,12 @@ public void testRandomSorting() throws IOException, InterruptedException, Execut } public void test3078() { + Consumer assertConsumer = response -> { + assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); + assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); + assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); + }; + assertAcked(indicesAdmin().prepareCreate("test").setMapping("field", "type=keyword").get()); ensureGreen(); @@ -332,11 +340,7 @@ public void test3078() { refresh(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), 
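
In the RandomSamplerIT change above, the NUM_SAMPLE_RUNS identical requests are materialized as an array so they can be handed to assertResponses in one call. IntStream.rangeClosed(0, NUM_SAMPLE_RUNS - 1) iterates the same NUM_SAMPLE_RUNS times as the former for-i loop; the half-open IntStream.range(0, NUM_SAMPLE_RUNS) expresses the same bound without the "- 1" and would be an equivalent way to build the array (illustrative only, not part of the patch):

    // Equivalent request-array construction with a half-open range.
    SearchRequestBuilder[] requests = IntStream.range(0, NUM_SAMPLE_RUNS)
        .mapToObj(num -> prepareSearch("idx").setPreference("shard:0"))  // one identical request per run
        .toArray(SearchRequestBuilder[]::new);
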
equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // reindex and refresh prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); @@ -344,22 +348,14 @@ public void test3078() { assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // reindex - no refresh prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); // force merge forceMerge(); @@ -368,20 +364,12 @@ public void test3078() { prepareIndex("test").setId(Integer.toString(1)).setSource("field", Integer.toString(1)).get(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); refresh(); assertResponse( prepareSearch("test").setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("field").order(SortOrder.ASC)), - response -> { - assertThat(response.getHits().getAt(0).getSortValues()[0].toString(), equalTo("1")); - assertThat(response.getHits().getAt(1).getSortValues()[0].toString(), equalTo("10")); - assertThat(response.getHits().getAt(2).getSortValues()[0].toString(), equalTo("100")); - } + assertConsumer ); } @@ -395,39 +383,19 @@ public void testScoreSortDirection() throws Exception { refresh(); - assertResponse( + assertResponses(response -> { + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); + assertThat(response.getHits().getAt(1).getId(), equalTo("2")); + assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); + assertThat(response.getHits().getAt(2).getId(), equalTo("3")); + }, prepareSearch("test").setQuery( QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) ), - response -> { - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - } - ); - assertResponse( - prepareSearch("test").setQuery( - 
QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC), - response -> { - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getScore(), Matchers.lessThan(response.getHits().getAt(0).getScore())); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(2).getScore(), Matchers.lessThan(response.getHits().getAt(1).getScore())); - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - } - ); - assertResponse( prepareSearch("test").setQuery( QueryBuilders.functionScoreQuery(matchAllQuery(), ScoreFunctionBuilders.fieldValueFactorFunction("field")) - ).addSort("_score", SortOrder.DESC), - response -> { - assertThat(response.getHits().getAt(2).getId(), equalTo("3")); - assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - } + ).addSort("_score", SortOrder.DESC) ); } @@ -878,30 +846,20 @@ public void testSortMissingStrings() throws IOException { throw new RuntimeException(); } - logger.info("--> sort with no missing (same as missing _last)"); - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)), - response -> { - assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); + assertResponses(response -> { + assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("3")); - assertThat(response.getHits().getAt(2).getId(), equalTo("2")); - } + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); + assertThat(response.getHits().getAt(0).getId(), equalTo("1")); + assertThat(response.getHits().getAt(1).getId(), equalTo("3")); + assertThat(response.getHits().getAt(2).getId(), equalTo("2")); + }, + // "--> sort with no missing (same as missing _last)" + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC)), + // "--> sort with missing _last" + prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")) ); - logger.info("--> sort with missing _last"); - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_last")), - response -> { - assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getId(), equalTo("3")); - assertThat(response.getHits().getAt(2).getId(), equalTo("2")); - } - ); logger.info("--> sort with missing _first"); assertResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("value").order(SortOrder.ASC).missing("_first")), @@ -1263,59 +1221,7 @@ public void testSortMVField() throws Exception { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).longValue(), equalTo(2L)); } ); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> { - 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(-4)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(1)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(((Number) response.getHits().getAt(0).getSortValues()[0]).intValue(), equalTo(20)); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(((Number) response.getHits().getAt(1).getSortValues()[0]).intValue(), equalTo(10)); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1327,8 +1233,12 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(2))); assertThat(((Number) 
response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); - }); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> { + }, + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC) + ); + assertResponses(response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1340,7 +1250,11 @@ public void testSortMVField() throws Exception { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); - }); + }, + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), + prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC) + ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); @@ -1478,8 +1392,7 @@ public void testSortOnRareField() throws IOException { } refresh(); - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> { - + Consumer assertResponse = response -> { assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1490,27 +1403,17 @@ public void testSortOnRareField() throws IOException { assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03")); - }); + }; + + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), assertResponse); + for (int i = 0; i < 15; i++) { prepareIndex("test").setId(Integer.toString(300 + i)) .setSource(jsonBuilder().startObject().array("some_other_field", "foobar").endObject()) .get(); refresh(); } - - assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), response -> { - - assertThat(response.getHits().getHits().length, equalTo(3)); - - assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); - assertThat(response.getHits().getAt(0).getSortValues()[0], equalTo("20")); - - assertThat(response.getHits().getAt(1).getId(), equalTo(Integer.toString(1))); - assertThat(response.getHits().getAt(1).getSortValues()[0], equalTo("10")); - - assertThat(response.getHits().getAt(2).getId(), equalTo(Integer.toString(3))); - assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("03")); - }); + assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(3).addSort("string_values", SortOrder.DESC), assertResponse); } public void testSortMetaField() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 1383f33a41d84..aabca1b9333f8 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -34,6 +34,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertOrderedSearchHits; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSortValues; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.closeTo; @@ -292,49 +293,22 @@ public void testSinglePointGeoDistanceSort() throws ExecutionException, Interrup String hashPoint = "s037ms06g7h0"; - GeoDistanceSortBuilder geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2)); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - geoDistanceSortBuilder = new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2); - - assertResponse( - prepareSearch().setQuery(matchAllQuery()).addSort(geoDistanceSortBuilder.sortMode(SortMode.MIN).order(SortOrder.ASC)), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( + assertResponses( + response -> checkCorrectSortOrderForGeoSort(response), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, hashPoint).sortMode(SortMode.MIN).order(SortOrder.ASC)), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, new GeoPoint(2, 2)).sortMode(SortMode.MIN).order(SortOrder.ASC)), + prepareSearch().setQuery(matchAllQuery()) + .addSort(new GeoDistanceSortBuilder(LOCATION_FIELD, 2, 2).sortMode(SortMode.MIN).order(SortOrder.ASC)), prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, "s037ms06g7h0"))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource(new SearchSourceBuilder().sort(SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0))), - response -> checkCorrectSortOrderForGeoSort(response) - ); - - assertResponse( prepareSearch().setSource( new SearchSourceBuilder().sort( SortBuilders.geoDistanceSort(LOCATION_FIELD, 2.0, 2.0).validation(GeoValidationMethod.COERCE) ) - ), - response -> checkCorrectSortOrderForGeoSort(response) + ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index ec9c680e17fc3..9d53eb03eb04e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -22,12 +22,14 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class MetadataFetchingIT extends ESIntegTestCase { + public void testSimple() { assertAcked(prepareCreate("test")); ensureGreen(); @@ -35,17 +37,14 @@ public void testSimple() { prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - assertResponse(prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getVersion(), notNullValue()); - }); - - assertResponse(prepareSearch("test").storedFields("_none_"), response -> { - assertThat(response.getHits().getAt(0).getId(), nullValue()); - assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); - }); + }, + prepareSearch("test").storedFields("_none_").setFetchSource(false).setVersion(true), + prepareSearch("test").storedFields("_none_") + ); } public void testInnerHits() { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java index 616fc2e1f3483..0e7f8b604a8df 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/SourceFetchingIT.java @@ -12,11 +12,13 @@ import org.elasticsearch.test.ESIntegTestCase; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponses; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.core.IsEqual.equalTo; public class SourceFetchingIT extends ESIntegTestCase { + public void testSourceDefaultBehavior() { createIndex("test"); ensureGreen(); @@ -24,18 +26,16 @@ public void testSourceDefaultBehavior() { indexDoc("test", "1", "field", "value"); refresh(); - assertResponse(prepareSearch("test"), response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue())); + assertResponses( + response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()), + prepareSearch("test"), + prepareSearch("test").addStoredField("_source") + ); assertResponse( prepareSearch("test").addStoredField("bla"), response -> assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()) ); - - assertResponse( - prepareSearch("test").addStoredField("_source"), - response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) - ); - } public void testSourceFiltering() { @@ -55,20 +55,20 @@ public void testSourceFiltering() { response -> assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()) ); - 
assertResponse(prepareSearch("test").setFetchSource("field1", null), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - }); + }, + prepareSearch("test").setFetchSource("field1", null), + prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }) + ); + assertResponse(prepareSearch("test").setFetchSource("hello", null), response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(0)); }); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "*" }, new String[] { "field2" }), response -> { - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field1"), equalTo("value")); - }); + } /** @@ -82,15 +82,13 @@ public void testSourceWithWildcardFiltering() { prepareIndex("test").setId("1").setSource("field", "value").get(); refresh(); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), response -> { - assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); - assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); - assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); - }); - assertResponse(prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null), response -> { + assertResponses(response -> { assertThat(response.getHits().getAt(0).getSourceAsString(), notNullValue()); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat((String) response.getHits().getAt(0).getSourceAsMap().get("field"), equalTo("value")); - }); + }, + prepareSearch("test").setFetchSource(new String[] { "*.notexisting", "field" }, null), + prepareSearch("test").setFetchSource(new String[] { "field.notexisting.*", "field" }, null) + ); } } From bff8ce65c9f06f1b5c47e33f25a47a79fa2897aa Mon Sep 17 00:00:00 2001 From: elasticsearchmachine <58790826+elasticsearchmachine@users.noreply.github.com> Date: Sat, 23 Nov 2024 01:39:21 +1100 Subject: [PATCH 40/50] Mute org.elasticsearch.xpack.inference.DefaultEndPointsIT testInferDeploysDefaultElser #114913 --- muted-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/muted-tests.yml b/muted-tests.yml index 8be390e670c9b..fcc3b6b6e9b12 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -244,6 +244,9 @@ tests: - class: org.elasticsearch.xpack.searchablesnapshots.RetrySearchIntegTests method: testRetryPointInTime issue: https://github.com/elastic/elasticsearch/issues/117116 +- class: org.elasticsearch.xpack.inference.DefaultEndPointsIT + method: testInferDeploysDefaultElser + issue: https://github.com/elastic/elasticsearch/issues/114913 # Examples: # From f6ac6e1c3b5fde2137b2e8ef16fb0f93848d2bd1 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Fri, 22 Nov 2024 16:30:57 +0100 Subject: [PATCH 41/50] [Build] Remove deprecated BuildParams (#116984) --- .../ElasticsearchJavaPluginFuncTest.groovy | 3 - .../src/main/groovy/elasticsearch.fips.gradle | 2 - .../src/main/groovy/elasticsearch.ide.gradle | 1 - 
.../groovy/elasticsearch.stable-api.gradle | 1 - .../gradle/internal/info/BuildParams.java | 80 ------------------- .../internal/info/GlobalBuildInfoPlugin.java | 7 -- distribution/packages/build.gradle | 1 - libs/plugin-analysis-api/build.gradle | 2 - libs/simdvec/build.gradle | 1 - modules/aggregations/build.gradle | 2 - modules/data-streams/build.gradle | 1 - modules/ingest-attachment/build.gradle | 2 - .../qa/full-cluster-restart/build.gradle | 1 - modules/legacy-geo/build.gradle | 2 - modules/mapper-extras/build.gradle | 2 - modules/reindex/build.gradle | 1 - modules/repository-azure/build.gradle | 10 +-- modules/repository-gcs/build.gradle | 1 - modules/repository-s3/build.gradle | 7 +- modules/rest-root/build.gradle | 2 - plugins/analysis-icu/build.gradle | 2 - plugins/discovery-azure-classic/build.gradle | 6 +- plugins/discovery-ec2/build.gradle | 2 - plugins/discovery-gce/qa/gce/build.gradle | 2 - plugins/mapper-annotated-text/build.gradle | 2 - plugins/mapper-murmur3/build.gradle | 2 - plugins/repository-hdfs/build.gradle | 1 - .../build.gradle | 1 - qa/multi-cluster-search/build.gradle | 1 - qa/repository-multi-version/build.gradle | 1 - qa/rolling-upgrade/build.gradle | 1 - qa/verify-version-constants/build.gradle | 1 - server/build.gradle | 2 - .../apm-integration/build.gradle | 10 ++- test/external-modules/build.gradle | 10 ++- .../delayed-aggs/build.gradle | 1 - .../die-with-dignity/build.gradle | 10 ++- .../external-modules/error-query/build.gradle | 1 - .../esql-heap-attack/build.gradle | 1 - test/external-modules/jvm-crash/build.gradle | 1 - test/framework/build.gradle | 1 - test/immutable-collections-patch/build.gradle | 1 - x-pack/plugin/analytics/build.gradle | 7 +- .../plugin/async-search/qa/rest/build.gradle | 7 +- .../plugin/autoscaling/qa/rest/build.gradle | 7 +- x-pack/plugin/build.gradle | 8 +- x-pack/plugin/ccr/qa/build.gradle | 7 +- .../downgrade-to-basic-license/build.gradle | 7 ++ .../plugin/ccr/qa/multi-cluster/build.gradle | 7 ++ x-pack/plugin/core/build.gradle | 9 ++- .../qa/early-deprecation-rest/build.gradle | 8 +- .../plugin/deprecation/qa/rest/build.gradle | 8 +- .../downsample/qa/mixed-cluster/build.gradle | 1 - x-pack/plugin/downsample/qa/rest/build.gradle | 2 - .../downsample/qa/with-security/build.gradle | 1 - .../rest-with-advanced-security/build.gradle | 9 ++- .../enrich/qa/rest-with-security/build.gradle | 9 ++- x-pack/plugin/enrich/qa/rest/build.gradle | 9 ++- .../qa/full-cluster-restart/build.gradle | 1 - x-pack/plugin/eql/build.gradle | 8 +- .../eql/qa/ccs-rolling-upgrade/build.gradle | 2 - x-pack/plugin/eql/qa/correctness/build.gradle | 8 +- x-pack/plugin/eql/qa/mixed-node/build.gradle | 8 +- x-pack/plugin/eql/qa/rest/build.gradle | 9 ++- x-pack/plugin/eql/qa/security/build.gradle | 9 ++- x-pack/plugin/esql/build.gradle | 8 +- .../esql/qa/server/mixed-cluster/build.gradle | 7 +- .../qa/server/multi-clusters/build.gradle | 1 - x-pack/plugin/identity-provider/build.gradle | 8 +- .../qa/idp-rest-tests/build.gradle | 8 +- .../plugin/ilm/qa/multi-cluster/build.gradle | 8 +- x-pack/plugin/ilm/qa/multi-node/build.gradle | 8 +- x-pack/plugin/inference/build.gradle | 1 - .../inference/qa/mixed-cluster/build.gradle | 8 +- .../inference/qa/rolling-upgrade/build.gradle | 1 - x-pack/plugin/kql/build.gradle | 7 ++ x-pack/plugin/logsdb/build.gradle | 2 - .../plugin/logsdb/qa/with-basic/build.gradle | 2 - .../mapper-aggregate-metric/build.gradle | 2 - .../mapper-constant-keyword/build.gradle | 7 +- .../plugin/mapper-unsigned-long/build.gradle | 5 +- 
x-pack/plugin/mapper-version/build.gradle | 8 +- x-pack/plugin/ml/build.gradle | 7 +- .../ml/qa/basic-multi-node/build.gradle | 7 +- x-pack/plugin/ml/qa/disabled/build.gradle | 7 +- .../build.gradle | 8 +- .../ml/qa/single-node-tests/build.gradle | 7 +- .../qa/azure/build.gradle | 2 - .../qa/gcs/build.gradle | 2 - .../qa/s3/build.gradle | 8 +- .../qa/azure/build.gradle | 7 +- .../searchable-snapshots/qa/gcs/build.gradle | 7 +- .../searchable-snapshots/qa/hdfs/build.gradle | 2 - .../searchable-snapshots/qa/s3/build.gradle | 8 +- x-pack/plugin/security/cli/build.gradle | 8 +- .../qa/basic-enable-security/build.gradle | 2 - .../plugin/security/qa/jwt-realm/build.gradle | 8 +- .../security/qa/multi-cluster/build.gradle | 1 - .../plugin/security/qa/profile/build.gradle | 7 +- .../security/qa/security-basic/build.gradle | 8 +- .../qa/security-disabled/build.gradle | 8 +- .../plugin/security/qa/tls-basic/build.gradle | 9 ++- .../qa/full-cluster-restart/build.gradle | 8 +- .../shutdown/qa/rolling-upgrade/build.gradle | 2 +- x-pack/plugin/slm/build.gradle | 7 +- x-pack/plugin/slm/qa/multi-node/build.gradle | 8 +- .../qa/azure/build.gradle | 2 - .../qa/gcs/build.gradle | 7 +- .../qa/s3/build.gradle | 2 - .../qa/azure/build.gradle | 2 - .../qa/gcs/build.gradle | 2 - .../qa/hdfs/build.gradle | 2 - .../snapshot-repo-test-kit/qa/s3/build.gradle | 2 - x-pack/plugin/spatial/build.gradle | 7 +- x-pack/plugin/sql/build.gradle | 9 ++- x-pack/plugin/sql/qa/jdbc/build.gradle | 8 +- .../qa/jdbc/security/with-ssl/build.gradle | 7 +- x-pack/plugin/sql/qa/mixed-node/build.gradle | 12 ++- .../qa/server/security/with-ssl/build.gradle | 7 +- x-pack/plugin/sql/sql-cli/build.gradle | 7 +- .../build.gradle | 8 +- x-pack/plugin/watcher/qa/rest/build.gradle | 7 +- x-pack/plugin/wildcard/build.gradle | 7 +- .../build.gradle | 8 +- x-pack/qa/full-cluster-restart/build.gradle | 8 +- x-pack/qa/mixed-tier-cluster/build.gradle | 12 ++- .../legacy-with-basic-license/build.gradle | 8 +- .../legacy-with-full-license/build.gradle | 8 +- .../legacy-with-restricted-trust/build.gradle | 8 +- x-pack/qa/oidc-op-tests/build.gradle | 7 +- x-pack/qa/rolling-upgrade-basic/build.gradle | 8 +- .../build.gradle | 8 +- x-pack/qa/rolling-upgrade/build.gradle | 8 +- x-pack/qa/smoke-test-plugins-ssl/build.gradle | 9 ++- x-pack/qa/smoke-test-plugins/build.gradle | 8 +- x-pack/qa/third-party/jira/build.gradle | 9 ++- x-pack/qa/third-party/pagerduty/build.gradle | 7 +- x-pack/qa/third-party/slack/build.gradle | 7 +- 138 files changed, 522 insertions(+), 278 deletions(-) delete mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy index 9fc6aa7276b2d..36a43c4b739b6 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/ElasticsearchJavaPluginFuncTest.groovy @@ -20,9 +20,6 @@ class ElasticsearchJavaPluginFuncTest extends AbstractGradleInternalPluginFuncTe when: buildFile.text << """ import org.elasticsearch.gradle.Architecture - import org.elasticsearch.gradle.internal.info.BuildParams - BuildParams.init { it.setMinimumRuntimeVersion(JavaVersion.VERSION_1_10) } - assert 
tasks.named('compileJava').get().sourceCompatibility == JavaVersion.VERSION_1_10.toString() assert tasks.named('compileJava').get().targetCompatibility == JavaVersion.VERSION_1_10.toString() """ diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index 3c9cf121813c9..14e2323b4d14d 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -9,11 +9,9 @@ import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.rest.RestTestBasePlugin import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask import org.elasticsearch.gradle.testclusters.TestClustersAware -import org.elasticsearch.gradle.testclusters.TestDistribution //apply plugin: org.elasticsearch.gradle.internal.info.GlobalBuildInfoPlugin diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 431d51d6c1275..9237c3ae8918c 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.util.Pair import org.elasticsearch.gradle.util.GradleUtils -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.TestUtil import org.jetbrains.gradle.ext.JUnit diff --git a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle index 27b490329f8cb..3f506ae954df8 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.stable-api.gradle @@ -3,7 +3,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.BwcVersions import org.elasticsearch.gradle.internal.JarApiComparisonTask -import org.elasticsearch.gradle.internal.info.BuildParams import static org.elasticsearch.gradle.internal.InternalDistributionBwcSetupPlugin.buildBwcTaskName diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java deleted file mode 100644 index ea8aeda8fc099..0000000000000 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/BuildParams.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ -package org.elasticsearch.gradle.internal.info; - -import java.lang.reflect.Modifier; -import java.util.Arrays; -import java.util.function.Consumer; - -@Deprecated -public class BuildParams { - private static Boolean isCi; - - /** - * Initialize global build parameters. 
This method accepts and a initialization function which in turn accepts a - * {@link MutableBuildParams}. Initialization can be done in "stages", therefore changes override existing values, and values from - * previous calls to {@link #init(Consumer)} carry forward. In cases where you want to clear existing values - * {@link MutableBuildParams#reset()} may be used. - * - * @param initializer Build parameter initializer - */ - public static void init(Consumer initializer) { - initializer.accept(MutableBuildParams.INSTANCE); - } - - public static Boolean isCi() { - return value(isCi); - } - - private static T value(T object) { - if (object == null) { - String callingMethod = Thread.currentThread().getStackTrace()[2].getMethodName(); - - throw new IllegalStateException( - "Build parameter '" - + propertyName(callingMethod) - + "' has not been initialized.\n" - + "Perhaps the plugin responsible for initializing this property has not been applied." - ); - } - - return object; - } - - private static String propertyName(String methodName) { - String propertyName = methodName.startsWith("is") ? methodName.substring("is".length()) : methodName.substring("get".length()); - return propertyName.substring(0, 1).toLowerCase() + propertyName.substring(1); - } - - public static class MutableBuildParams { - private static MutableBuildParams INSTANCE = new MutableBuildParams(); - - private MutableBuildParams() {} - - /** - * Resets any existing values from previous initializations. - */ - public void reset() { - Arrays.stream(BuildParams.class.getDeclaredFields()).filter(f -> Modifier.isStatic(f.getModifiers())).forEach(f -> { - try { - // Since we are mutating private static fields from a public static inner class we need to suppress - // accessibility controls here. 
- f.setAccessible(true); - f.set(null, null); - } catch (IllegalAccessException e) { - throw new RuntimeException(e); - } - }); - } - - public void setIsCi(boolean isCi) { - BuildParams.isCi = isCi; - } - } -} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 761b0601a1c24..0535026b2594e 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -152,13 +152,6 @@ public void apply(Project project) { spec.getParameters().getBuildParams().set(buildParams); }); - BuildParams.init(params -> { - params.reset(); - params.setIsCi( - System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null - ); - }); - // Enforce the minimum compiler version assertMinimumCompilerVersion(minimumCompilerVersion); diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 7d60137ac86b1..486c95d15c7a1 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -10,7 +10,6 @@ import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams import org.redline_rpm.header.Flags import java.nio.file.Files diff --git a/libs/plugin-analysis-api/build.gradle b/libs/plugin-analysis-api/build.gradle index 3f1670d76a0c1..41fbbdbafe998 100644 --- a/libs/plugin-analysis-api/build.gradle +++ b/libs/plugin-analysis-api/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/libs/simdvec/build.gradle b/libs/simdvec/build.gradle index ffc50ecb1f6ff..95b8ddf28cf2f 100644 --- a/libs/simdvec/build.gradle +++ b/libs/simdvec/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask apply plugin: 'elasticsearch.publish' diff --git a/modules/aggregations/build.gradle b/modules/aggregations/build.gradle index 2835180904620..94fdddf6d711a 100644 --- a/modules/aggregations/build.gradle +++ b/modules/aggregations/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
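
For context on this removal: BuildParams (deleted above) was a mutable static singleton, populated by GlobalBuildInfoPlugin via BuildParams.init(...), reset through reflection (f.setAccessible(true)), and throwing "Build parameter ... has not been initialized" when read before the plugin had run. The GlobalBuildInfoPlugin hunk above shows the replacement direction: the computed parameters are set on shared, injected parameters (spec.getParameters().getBuildParams().set(buildParams)) and consumers obtain them through Gradle's Provider API instead of a static read. A hedged sketch of the consuming side in that style follows; the task and property names are illustrative, not the repository's actual replacement for BuildParams:

    // Hedged sketch of provider-based wiring (standard Gradle API; names are illustrative).
    import org.gradle.api.DefaultTask;
    import org.gradle.api.provider.Property;
    import org.gradle.api.tasks.Input;
    import org.gradle.api.tasks.TaskAction;

    public abstract class ReportCiTask extends DefaultTask {

        @Input
        public abstract Property<Boolean> getCi(); // wired once at configuration time by the plugin

        @TaskAction
        public void report() {
            // No lazy static lookup and no "has not been initialized" failure mode:
            // the value is carried by the Property and read only when the task runs.
            getLogger().lifecycle("CI build: {}", getCi().get());
        }
    }

The same pattern explains the bulk of this patch: each build script that previously referenced org.elasticsearch.gradle.internal.info.BuildParams simply drops the import, since the values now reach it through injected parameters rather than a class reference.
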
Licensed under the "Elastic License diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index b6fc1e3722ccd..b017ae9921b0e 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.test-with-dependencies' diff --git a/modules/ingest-attachment/build.gradle b/modules/ingest-attachment/build.gradle index 821de8f834a44..8fe2b82fe21fb 100644 --- a/modules/ingest-attachment/build.gradle +++ b/modules/ingest-attachment/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle index 8e7d20108a869..29cc6d7184bf2 100644 --- a/modules/ingest-geoip/qa/full-cluster-restart/build.gradle +++ b/modules/ingest-geoip/qa/full-cluster-restart/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/legacy-geo/build.gradle b/modules/legacy-geo/build.gradle index 1b4fd9d52bbaf..55171221396a3 100644 --- a/modules/legacy-geo/build.gradle +++ b/modules/legacy-geo/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.internal-cluster-test' esplugin { diff --git a/modules/mapper-extras/build.gradle b/modules/mapper-extras/build.gradle index a7bdc11e15550..eda55fe6de9da 100644 --- a/modules/mapper-extras/build.gradle +++ b/modules/mapper-extras/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index bb1500ba55664..b4a1c9cd6248d 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -10,7 +10,6 @@ import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.transform.UnzipTransform diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index 4babac68f1e71..8c1ca3891bc1e 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -1,8 +1,3 @@ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin -import org.elasticsearch.gradle.internal.test.RestIntegTestTask - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the "Elastic License @@ -11,6 +6,11 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ + +import org.apache.tools.ant.filters.ReplaceTokens +import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin +import org.elasticsearch.gradle.internal.test.RestIntegTestTask + apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index 605d886a71056..811645d154c7a 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -9,7 +9,6 @@ import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin import java.nio.file.Files diff --git a/modules/repository-s3/build.gradle b/modules/repository-s3/build.gradle index c1cd1a13719a7..1301d17606d63 100644 --- a/modules/repository-s3/build.gradle +++ b/modules/repository-s3/build.gradle @@ -1,7 +1,3 @@ -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License @@ -10,6 +6,9 @@ import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ +import org.apache.tools.ant.filters.ReplaceTokens +import org.elasticsearch.gradle.internal.test.InternalClusterTestPlugin + apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/modules/rest-root/build.gradle b/modules/rest-root/build.gradle index 05a545a1ed671..adb8aeb02863f 100644 --- a/modules/rest-root/build.gradle +++ b/modules/rest-root/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/analysis-icu/build.gradle b/plugins/analysis-icu/build.gradle index f9245ed32c325..05cd2cb44124c 100644 --- a/plugins/analysis-icu/build.gradle +++ b/plugins/analysis-icu/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 16786c6c31074..3ec2ec531ae92 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -1,6 +1,3 @@ -import org.elasticsearch.gradle.LoggedExec -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the "Elastic License @@ -9,6 +6,9 @@ import org.elasticsearch.gradle.internal.info.BuildParams * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ + +import org.elasticsearch.gradle.LoggedExec + apply plugin: 'elasticsearch.legacy-yaml-rest-test' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index f281db5279660..980e2467206d7 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/discovery-gce/qa/gce/build.gradle b/plugins/discovery-gce/qa/gce/build.gradle index a22678b9a67dc..72cb429b49072 100644 --- a/plugins/discovery-gce/qa/gce/build.gradle +++ b/plugins/discovery-gce/qa/gce/build.gradle @@ -7,9 +7,7 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ - import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/plugins/mapper-annotated-text/build.gradle b/plugins/mapper-annotated-text/build.gradle index 545dfe49bfcf3..ff7230701aa0a 100644 --- a/plugins/mapper-annotated-text/build.gradle +++ b/plugins/mapper-annotated-text/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the "Elastic License diff --git a/plugins/mapper-murmur3/build.gradle b/plugins/mapper-murmur3/build.gradle index e5108814154a3..15d7f6249695b 100644 --- a/plugins/mapper-murmur3/build.gradle +++ b/plugins/mapper-murmur3/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.internal.info.BuildParams - /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the "Elastic License diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index b7f7816a3a0e1..6c2dc56b17eb2 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -9,7 +9,6 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.OS -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.internal-yaml-rest-test' diff --git a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle index ce5b840e6dc91..e63b1629db39c 100644 --- a/qa/ccs-rolling-upgrade-remote-cluster/build.gradle +++ b/qa/ccs-rolling-upgrade-remote-cluster/build.gradle @@ -8,7 +8,6 @@ */ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 906a49134bb51..d46bf3f18f8cc 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -15,7 +15,6 @@ import org.elasticsearch.gradle.Version -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/repository-multi-version/build.gradle b/qa/repository-multi-version/build.gradle index 79a8be4c1be24..646a7974868c4 100644 --- a/qa/repository-multi-version/build.gradle +++ b/qa/repository-multi-version/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index 2f717f201f248..1d7475427b33b 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -7,7 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/qa/verify-version-constants/build.gradle b/qa/verify-version-constants/build.gradle index ee29da53dc51b..67fc962e087cb 100644 --- a/qa/verify-version-constants/build.gradle +++ b/qa/verify-version-constants/build.gradle @@ -8,7 +8,6 @@ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/server/build.gradle b/server/build.gradle index bc8decfa8babc..0bd807751ecbb 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -7,8 +7,6 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -import org.elasticsearch.gradle.internal.info.BuildParams - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' apply plugin: 'elasticsearch.internal-cluster-test' diff --git a/test/external-modules/apm-integration/build.gradle b/test/external-modules/apm-integration/build.gradle index 91e01d363749c..7f64b33b81423 100644 --- a/test/external-modules/apm-integration/build.gradle +++ b/test/external-modules/apm-integration/build.gradle @@ -1,4 +1,12 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/test/external-modules/build.gradle b/test/external-modules/build.gradle index 1b1e61a69e190..3ba6b309071f3 100644 --- a/test/external-modules/build.gradle +++ b/test/external-modules/build.gradle @@ -1,5 +1,11 @@ - -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ subprojects { apply plugin: 'elasticsearch.base-internal-es-plugin' diff --git a/test/external-modules/delayed-aggs/build.gradle b/test/external-modules/delayed-aggs/build.gradle index f57bd37d65171..fae5e93b37fc3 100644 --- a/test/external-modules/delayed-aggs/build.gradle +++ b/test/external-modules/delayed-aggs/build.gradle @@ -6,7 +6,6 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". */ -import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/test/external-modules/die-with-dignity/build.gradle b/test/external-modules/die-with-dignity/build.gradle index 1f98e43052589..6c1da40406a5d 100644 --- a/test/external-modules/die-with-dignity/build.gradle +++ b/test/external-modules/die-with-dignity/build.gradle @@ -1,4 +1,12 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */
+
 import org.elasticsearch.gradle.util.GradleUtils

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/test/external-modules/error-query/build.gradle b/test/external-modules/error-query/build.gradle
index ff4783552ebf5..3c72145c11f8b 100644
--- a/test/external-modules/error-query/build.gradle
+++ b/test/external-modules/error-query/build.gradle
@@ -7,7 +7,6 @@
  * License v3.0 only", or the "Server Side Public License, v 1".
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'

 tasks.named('yamlRestTest').configure {
diff --git a/test/external-modules/esql-heap-attack/build.gradle b/test/external-modules/esql-heap-attack/build.gradle
index 3d6291f6d011a..ba85e0dbd8693 100644
--- a/test/external-modules/esql-heap-attack/build.gradle
+++ b/test/external-modules/esql-heap-attack/build.gradle
@@ -6,7 +6,6 @@
  * your election, the "Elastic License 2.0", the "GNU Affero General Public
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-java-rest-test'
 // Necessary to use tests in Serverless
diff --git a/test/external-modules/jvm-crash/build.gradle b/test/external-modules/jvm-crash/build.gradle
index 73ad8b851a220..0b06142e81939 100644
--- a/test/external-modules/jvm-crash/build.gradle
+++ b/test/external-modules/jvm-crash/build.gradle
@@ -6,7 +6,6 @@
  * your election, the "Elastic License 2.0", the "GNU Affero General Public
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-java-rest-test'
 // Necessary to use tests in Serverless
diff --git a/test/framework/build.gradle b/test/framework/build.gradle
index c61a3b1851ea9..126b95041da11 100644
--- a/test/framework/build.gradle
+++ b/test/framework/build.gradle
@@ -6,7 +6,6 @@
  * your election, the "Elastic License 2.0", the "GNU Affero General Public
  * License v3.0 only", or the "Server Side Public License, v 1".
  */
-import org.elasticsearch.gradle.internal.info.BuildParams;

 apply plugin: 'elasticsearch.build'
 apply plugin: 'elasticsearch.publish'
diff --git a/test/immutable-collections-patch/build.gradle b/test/immutable-collections-patch/build.gradle
index 381c0cd6dd044..85a199af2d477 100644
--- a/test/immutable-collections-patch/build.gradle
+++ b/test/immutable-collections-patch/build.gradle
@@ -9,7 +9,6 @@
 import org.elasticsearch.gradle.OS
 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.java'
diff --git a/x-pack/plugin/analytics/build.gradle b/x-pack/plugin/analytics/build.gradle
index ddc075cc9adcc..00f28b4badc3d 100644
--- a/x-pack/plugin/analytics/build.gradle
+++ b/x-pack/plugin/analytics/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
diff --git a/x-pack/plugin/async-search/qa/rest/build.gradle b/x-pack/plugin/async-search/qa/rest/build.gradle
index c950646930779..eb758c2c0ef5e 100644
--- a/x-pack/plugin/async-search/qa/rest/build.gradle
+++ b/x-pack/plugin/async-search/qa/rest/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.base-internal-es-plugin'
 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/autoscaling/qa/rest/build.gradle b/x-pack/plugin/autoscaling/qa/rest/build.gradle
index c79644ee31225..903e76fd986cf 100644
--- a/x-pack/plugin/autoscaling/qa/rest/build.gradle
+++ b/x-pack/plugin/autoscaling/qa/rest/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle
index 48b1d478ddf94..26040529b04df 100644
--- a/x-pack/plugin/build.gradle
+++ b/x-pack/plugin/build.gradle
@@ -1,6 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask
 import org.elasticsearch.gradle.util.GradleUtils
diff --git a/x-pack/plugin/ccr/qa/build.gradle b/x-pack/plugin/ccr/qa/build.gradle
index 4be504e616920..d5bc38d2e8dd5 100644
--- a/x-pack/plugin/ccr/qa/build.gradle
+++ b/x-pack/plugin/ccr/qa/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.java'
diff --git a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
index ac8ce1b0fd331..86f974ed13359 100644
--- a/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
+++ b/x-pack/plugin/ccr/qa/downgrade-to-basic-license/build.gradle
@@ -1,3 +1,10 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
 import org.elasticsearch.gradle.testclusters.TestClusterValueSource
 import org.elasticsearch.gradle.testclusters.TestClustersPlugin
diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
index 86abbbbeedf6b..61678784e6b38 100644
--- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
+++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle
@@ -1,3 +1,10 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
 import org.elasticsearch.gradle.testclusters.TestClusterValueSource
diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle
index d4c3f67bf3ebb..51d770936e64e 100644
--- a/x-pack/plugin/core/build.gradle
+++ b/x-pack/plugin/core/build.gradle
@@ -1,7 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.apache.tools.ant.filters.ReplaceTokens
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.Version
-
 import java.nio.file.Paths

 apply plugin: 'elasticsearch.internal-es-plugin'
diff --git a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle
index a9580f4e14d6b..7e61533c818ec 100644
--- a/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle
+++ b/x-pack/plugin/deprecation/qa/early-deprecation-rest/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.util.GradleUtils
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.base-internal-es-plugin'
 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/deprecation/qa/rest/build.gradle b/x-pack/plugin/deprecation/qa/rest/build.gradle
index 9a8b228763fe0..45b543d910a75 100644
--- a/x-pack/plugin/deprecation/qa/rest/build.gradle
+++ b/x-pack/plugin/deprecation/qa/rest/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.util.GradleUtils
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.base-internal-es-plugin'
 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle
index c4f2a239d48e2..236c851febd6c 100644
--- a/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle
+++ b/x-pack/plugin/downsample/qa/mixed-cluster/build.gradle
@@ -6,7 +6,6 @@
  */

 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-yaml-rest-test'
diff --git a/x-pack/plugin/downsample/qa/rest/build.gradle b/x-pack/plugin/downsample/qa/rest/build.gradle
index c5cfbea000ebe..54e07558464d1 100644
--- a/x-pack/plugin/downsample/qa/rest/build.gradle
+++ b/x-pack/plugin/downsample/qa/rest/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-yaml-rest-test'
 apply plugin: 'elasticsearch.yaml-rest-compat-test'
 apply plugin: 'elasticsearch.internal-test-artifact'
diff --git a/x-pack/plugin/downsample/qa/with-security/build.gradle b/x-pack/plugin/downsample/qa/with-security/build.gradle
index 849c242f372bd..29980b95d0291 100644
--- a/x-pack/plugin/downsample/qa/with-security/build.gradle
+++ b/x-pack/plugin/downsample/qa/with-security/build.gradle
@@ -6,7 +6,6 @@
  */

 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
diff --git a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle
index 2e649e718b081..6a1f820e36205 100644
--- a/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle
+++ b/x-pack/plugin/enrich/qa/rest-with-advanced-security/build.gradle
@@ -1,6 +1,11 @@
-apply plugin: 'elasticsearch.legacy-java-rest-test'
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

-import org.elasticsearch.gradle.internal.info.BuildParams
+apply plugin: 'elasticsearch.legacy-java-rest-test'

 dependencies {
   javaRestTestImplementation project(path: xpackModule('core'))
diff --git a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle
index 844cfcc61adff..17a213a6e7f0d 100644
--- a/x-pack/plugin/enrich/qa/rest-with-security/build.gradle
+++ b/x-pack/plugin/enrich/qa/rest-with-security/build.gradle
@@ -1,6 +1,11 @@
-apply plugin: 'elasticsearch.legacy-java-rest-test'
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

-import org.elasticsearch.gradle.internal.info.BuildParams
+apply plugin: 'elasticsearch.legacy-java-rest-test'

 dependencies {
   javaRestTestImplementation project(path: xpackModule('core'))
diff --git a/x-pack/plugin/enrich/qa/rest/build.gradle b/x-pack/plugin/enrich/qa/rest/build.gradle
index 637ab21a98fd7..cf3c687004cbb 100644
--- a/x-pack/plugin/enrich/qa/rest/build.gradle
+++ b/x-pack/plugin/enrich/qa/rest/build.gradle
@@ -1,10 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.legacy-java-rest-test'
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
-
 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.info.BuildParams

 restResources {
   restApi {
diff --git a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle
index 47a1ffaa37fa4..1e1973a118074 100644
--- a/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle
+++ b/x-pack/plugin/ent-search/qa/full-cluster-restart/build.gradle
@@ -5,7 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/eql/build.gradle b/x-pack/plugin/eql/build.gradle
index b0b5fefa37fcd..9ae67f0e27c2b 100644
--- a/x-pack/plugin/eql/build.gradle
+++ b/x-pack/plugin/eql/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
 esplugin {
diff --git a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle
index cbea0896264d5..bc1a44f94d18a 100644
--- a/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle
+++ b/x-pack/plugin/eql/qa/ccs-rolling-upgrade/build.gradle
@@ -5,9 +5,7 @@
  * 2.0.
  */

-
 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-testclusters'
diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle
index a791356499f5c..7ca6e8f134d20 100644
--- a/x-pack/plugin/eql/qa/correctness/build.gradle
+++ b/x-pack/plugin/eql/qa/correctness/build.gradle
@@ -1,9 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.java'
 apply plugin: 'elasticsearch.legacy-java-rest-test'
 apply plugin: 'elasticsearch.internal-testclusters'

 import org.elasticsearch.gradle.testclusters.RunTask
-import org.elasticsearch.gradle.internal.info.BuildParams

 dependencies {
   javaRestTestImplementation project(':test:framework')
diff --git a/x-pack/plugin/eql/qa/mixed-node/build.gradle b/x-pack/plugin/eql/qa/mixed-node/build.gradle
index d3aa227c7ef88..bbeb439ab6155 100644
--- a/x-pack/plugin/eql/qa/mixed-node/build.gradle
+++ b/x-pack/plugin/eql/qa/mixed-node/build.gradle
@@ -1,8 +1,14 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.legacy-java-rest-test'
 apply plugin: 'elasticsearch.bwc-test'

 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 dependencies {
diff --git a/x-pack/plugin/eql/qa/rest/build.gradle b/x-pack/plugin/eql/qa/rest/build.gradle
index 00f196d863f2e..0ffecefb934f7 100644
--- a/x-pack/plugin/eql/qa/rest/build.gradle
+++ b/x-pack/plugin/eql/qa/rest/build.gradle
@@ -1,10 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.internal-yaml-rest-test'
 apply plugin: 'elasticsearch.yaml-rest-compat-test'
 apply plugin: 'elasticsearch.internal-test-artifact'

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 restResources {
   restApi {
     include '_common', 'bulk', 'indices', 'eql'
diff --git a/x-pack/plugin/eql/qa/security/build.gradle b/x-pack/plugin/eql/qa/security/build.gradle
index 1f0f949cab706..9072a9a7bad3e 100644
--- a/x-pack/plugin/eql/qa/security/build.gradle
+++ b/x-pack/plugin/eql/qa/security/build.gradle
@@ -1,6 +1,11 @@
-apply plugin: 'elasticsearch.internal-java-rest-test'
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

-import org.elasticsearch.gradle.internal.info.BuildParams
+apply plugin: 'elasticsearch.internal-java-rest-test'

 dependencies {
   javaRestTestImplementation project(path: xpackModule('eql:qa:common'))
diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle
index 201863108a6c8..f92c895cc5b7b 100644
--- a/x-pack/plugin/esql/build.gradle
+++ b/x-pack/plugin/esql/build.gradle
@@ -1,8 +1,14 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 plugins {
   id 'idea'
 }

-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask;
 import org.elasticsearch.gradle.internal.util.SourceDirectoryCommandLineArgumentProvider;
 import static org.elasticsearch.gradle.util.PlatformUtils.normalize
diff --git a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle
index 68c0e8e30f814..eac5d5764d4b2 100644
--- a/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle
+++ b/x-pack/plugin/esql/qa/server/mixed-cluster/build.gradle
@@ -1,8 +1,13 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
 import org.elasticsearch.gradle.util.GradleUtils
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle
index 2c432eb94ebf1..7f3859e2229ef 100644
--- a/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle
+++ b/x-pack/plugin/esql/qa/server/multi-clusters/build.gradle
@@ -6,7 +6,6 @@
  */

 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/identity-provider/build.gradle b/x-pack/plugin/identity-provider/build.gradle
index f3b0def7eee97..f9c121da0f550 100644
--- a/x-pack/plugin/identity-provider/build.gradle
+++ b/x-pack/plugin/identity-provider/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.publish'
 apply plugin: 'elasticsearch.internal-cluster-test'
diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle
index b109c01181729..cbdb25825623d 100644
--- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle
+++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.legacy-java-rest-test'

 dependencies {
diff --git a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle
index 256225c5ef3bf..8bc2967fc63de 100644
--- a/x-pack/plugin/ilm/qa/multi-cluster/build.gradle
+++ b/x-pack/plugin/ilm/qa/multi-cluster/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
-import org.elasticsearch.gradle.internal.info.BuildParams
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

 apply plugin: 'elasticsearch.internal-testclusters'
diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle
index d420ac9effdde..4cd41e58b11ac 100644
--- a/x-pack/plugin/ilm/qa/multi-node/build.gradle
+++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/inference/build.gradle b/x-pack/plugin/inference/build.gradle
index 29d5add35ff49..3c19e11a450b4 100644
--- a/x-pack/plugin/inference/build.gradle
+++ b/x-pack/plugin/inference/build.gradle
@@ -4,7 +4,6 @@
  * 2.0; you may not use this file except in compliance with the Elastic License
  * 2.0.
  */
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
diff --git a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle
index c05e71fa1cd55..0f8c732154e85 100644
--- a/x-pack/plugin/inference/qa/mixed-cluster/build.gradle
+++ b/x-pack/plugin/inference/qa/mixed-cluster/build.gradle
@@ -1,6 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle
index bfaff7c84d9ad..214d775b46236 100644
--- a/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle
+++ b/x-pack/plugin/inference/qa/rolling-upgrade/build.gradle
@@ -5,7 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/kql/build.gradle b/x-pack/plugin/kql/build.gradle
index 79f2c91114bd9..76a4bd5aff777 100644
--- a/x-pack/plugin/kql/build.gradle
+++ b/x-pack/plugin/kql/build.gradle
@@ -1,3 +1,10 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import static org.elasticsearch.gradle.util.PlatformUtils.normalize

 apply plugin: 'elasticsearch.internal-es-plugin'
diff --git a/x-pack/plugin/logsdb/build.gradle b/x-pack/plugin/logsdb/build.gradle
index 60578f832d153..1aef69e0e3fac 100644
--- a/x-pack/plugin/logsdb/build.gradle
+++ b/x-pack/plugin/logsdb/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 evaluationDependsOn(xpackModule('core'))

 apply plugin: 'elasticsearch.internal-es-plugin'
diff --git a/x-pack/plugin/logsdb/qa/with-basic/build.gradle b/x-pack/plugin/logsdb/qa/with-basic/build.gradle
index 44ebd83bf4f4c..9729ac9c29cef 100644
--- a/x-pack/plugin/logsdb/qa/with-basic/build.gradle
+++ b/x-pack/plugin/logsdb/qa/with-basic/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'

 dependencies {
diff --git a/x-pack/plugin/mapper-aggregate-metric/build.gradle b/x-pack/plugin/mapper-aggregate-metric/build.gradle
index bae5acc21fc75..2a7841929b21d 100644
--- a/x-pack/plugin/mapper-aggregate-metric/build.gradle
+++ b/x-pack/plugin/mapper-aggregate-metric/build.gradle
@@ -1,5 +1,3 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
-
 /*
  * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
  * or more contributor license agreements. Licensed under the Elastic License
diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle
index 3b11d951fe37a..4f50246450f3f 100644
--- a/x-pack/plugin/mapper-constant-keyword/build.gradle
+++ b/x-pack/plugin/mapper-constant-keyword/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle
index faad1db822560..7eff1bfe94a3a 100644
--- a/x-pack/plugin/mapper-unsigned-long/build.gradle
+++ b/x-pack/plugin/mapper-unsigned-long/build.gradle
@@ -1,6 +1,3 @@
-import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.info.BuildParams
-
 /*
  * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
  * or more contributor license agreements. Licensed under the Elastic License
@@ -8,6 +5,8 @@ import org.elasticsearch.gradle.internal.info.BuildParams
  * 2.0.
  */

+import org.elasticsearch.gradle.Version
+
 evaluationDependsOn(xpackModule('core'))

 apply plugin: 'elasticsearch.internal-es-plugin'
diff --git a/x-pack/plugin/mapper-version/build.gradle b/x-pack/plugin/mapper-version/build.gradle
index fb760b3446dfd..a87def29620c7 100644
--- a/x-pack/plugin/mapper-version/build.gradle
+++ b/x-pack/plugin/mapper-version/build.gradle
@@ -1,8 +1,12 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 evaluationDependsOn(xpackModule('core'))
-
 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.legacy-yaml-rest-compat-test'
diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle
index 67c26c78a6741..716c401a9fcc8 100644
--- a/x-pack/plugin/ml/build.gradle
+++ b/x-pack/plugin/ml/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle
index 3854c70b0f389..07dc1cc3c612a 100644
--- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle
+++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/ml/qa/disabled/build.gradle b/x-pack/plugin/ml/qa/disabled/build.gradle
index 0d1d8d6484afc..9d157b3e7fa32 100644
--- a/x-pack/plugin/ml/qa/disabled/build.gradle
+++ b/x-pack/plugin/ml/qa/disabled/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle
index b43132c2daf50..c0d6913d85590 100644
--- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle
+++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle
@@ -1,8 +1,14 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
diff --git a/x-pack/plugin/ml/qa/single-node-tests/build.gradle b/x-pack/plugin/ml/qa/single-node-tests/build.gradle
index 5ed1c5179716f..02421d9bb3d14 100644
--- a/x-pack/plugin/ml/qa/single-node-tests/build.gradle
+++ b/x-pack/plugin/ml/qa/single-node-tests/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle
index 4683c13f1fc0c..43c78bfc887b7 100644
--- a/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle
+++ b/x-pack/plugin/repositories-metering-api/qa/azure/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'

 dependencies {
diff --git a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle
index 62fe47c08f5f5..984590f42256c 100644
--- a/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle
+++ b/x-pack/plugin/repositories-metering-api/qa/gcs/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'

 dependencies {
diff --git a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle
index 3c58e6a06af69..5c83e8980a474 100644
--- a/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle
+++ b/x-pack/plugin/repositories-metering-api/qa/s3/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle
index e2f77fae89225..de5ec42147d3f 100644
--- a/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle
+++ b/x-pack/plugin/searchable-snapshots/qa/azure/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle
index c0a420aff313a..0340453d0840b 100644
--- a/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle
+++ b/x-pack/plugin/searchable-snapshots/qa/gcs/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle
index e8d97da9a9e37..b41e0f8dcc1cf 100644
--- a/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle
+++ b/x-pack/plugin/searchable-snapshots/qa/hdfs/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
 apply plugin: 'elasticsearch.internal-available-ports'
diff --git a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle
index 430df2a7e8122..1659c592e5e64 100644
--- a/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle
+++ b/x-pack/plugin/searchable-snapshots/qa/s3/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/plugin/security/cli/build.gradle b/x-pack/plugin/security/cli/build.gradle
index 8fd3dd29f87a4..d450a38dd1d29 100644
--- a/x-pack/plugin/security/cli/build.gradle
+++ b/x-pack/plugin/security/cli/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.build'
diff --git a/x-pack/plugin/security/qa/basic-enable-security/build.gradle b/x-pack/plugin/security/qa/basic-enable-security/build.gradle
index a6930d38d41e5..72deed1af72dd 100644
--- a/x-pack/plugin/security/qa/basic-enable-security/build.gradle
+++ b/x-pack/plugin/security/qa/basic-enable-security/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'

 dependencies {
diff --git a/x-pack/plugin/security/qa/jwt-realm/build.gradle b/x-pack/plugin/security/qa/jwt-realm/build.gradle
index bc7178f11d9fc..1f7b7c1038fad 100644
--- a/x-pack/plugin/security/qa/jwt-realm/build.gradle
+++ b/x-pack/plugin/security/qa/jwt-realm/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.Version
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/security/qa/multi-cluster/build.gradle b/x-pack/plugin/security/qa/multi-cluster/build.gradle
index 8ee449d39dcce..5b682cfdccade 100644
--- a/x-pack/plugin/security/qa/multi-cluster/build.gradle
+++ b/x-pack/plugin/security/qa/multi-cluster/build.gradle
@@ -5,7 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/security/qa/profile/build.gradle b/x-pack/plugin/security/qa/profile/build.gradle
index 7465ef9917258..b0a1927ab9dfe 100644
--- a/x-pack/plugin/security/qa/profile/build.gradle
+++ b/x-pack/plugin/security/qa/profile/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/security/qa/security-basic/build.gradle b/x-pack/plugin/security/qa/security-basic/build.gradle
index 30751705bd75f..8740354646346 100644
--- a/x-pack/plugin/security/qa/security-basic/build.gradle
+++ b/x-pack/plugin/security/qa/security-basic/build.gradle
@@ -1,8 +1,12 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
 apply plugin: 'elasticsearch.internal-java-rest-test'

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 dependencies {
   javaRestTestImplementation(testArtifact(project(xpackModule('security'))))
   javaRestTestImplementation(testArtifact(project(xpackModule('core'))))
diff --git a/x-pack/plugin/security/qa/security-disabled/build.gradle b/x-pack/plugin/security/qa/security-disabled/build.gradle
index 0a05eae479d33..6fa100f392b9a 100644
--- a/x-pack/plugin/security/qa/security-disabled/build.gradle
+++ b/x-pack/plugin/security/qa/security-disabled/build.gradle
@@ -1,3 +1,10 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 /*
  * This QA project tests the security plugin when security is explicitly disabled.
  * It is intended to cover security functionality which is supposed to
@@ -5,7 +12,6 @@
  * For example: If a cluster has a pipeline with the set_security_user processor
  * defined, it should be not fail
  */
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/security/qa/tls-basic/build.gradle b/x-pack/plugin/security/qa/tls-basic/build.gradle
index e3b51bde45cc8..c0df6a4f27f58 100644
--- a/x-pack/plugin/security/qa/tls-basic/build.gradle
+++ b/x-pack/plugin/security/qa/tls-basic/build.gradle
@@ -1,6 +1,11 @@
-apply plugin: 'elasticsearch.legacy-java-rest-test'
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

-import org.elasticsearch.gradle.internal.info.BuildParams
+apply plugin: 'elasticsearch.legacy-java-rest-test'

 dependencies {
   javaRestTestImplementation(testArtifact(project(xpackModule('security'))))
diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
index 515ffca4a59bf..60b0b372ba14c 100644
--- a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
+++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
index 4c98276abe154..17996ce82a453 100644
--- a/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
+++ b/x-pack/plugin/shutdown/qa/rolling-upgrade/build.gradle
@@ -6,7 +6,7 @@
  */

 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
+
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-testclusters'
diff --git a/x-pack/plugin/slm/build.gradle b/x-pack/plugin/slm/build.gradle
index d9511fe67e8e0..b54e31315f709 100644
--- a/x-pack/plugin/slm/build.gradle
+++ b/x-pack/plugin/slm/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
diff --git a/x-pack/plugin/slm/qa/multi-node/build.gradle b/x-pack/plugin/slm/qa/multi-node/build.gradle
index d6b1fe8a1e219..afbae8932e292 100644
--- a/x-pack/plugin/slm/qa/multi-node/build.gradle
+++ b/x-pack/plugin/slm/qa/multi-node/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE

 apply plugin: 'elasticsearch.legacy-java-rest-test'
diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle
index cb2831f0cf273..7f69d6b7e56eb 100644
--- a/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle
+++ b/x-pack/plugin/snapshot-based-recoveries/qa/azure/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'

 dependencies {
diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle
index 7550ab8585e13..4d39ca95312aa 100644
--- a/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle
+++ b/x-pack/plugin/snapshot-based-recoveries/qa/gcs/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle
index e676e1f1f2162..07909bf4cdbc1 100644
--- a/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle
+++ b/x-pack/plugin/snapshot-based-recoveries/qa/s3/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle
index af4ed719a9c2f..5f195e983d191 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle
+++ b/x-pack/plugin/snapshot-repo-test-kit/qa/azure/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle
index b7e1036ab3e26..176a441279aab 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle
+++ b/x-pack/plugin/snapshot-repo-test-kit/qa/gcs/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'

 dependencies {
diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle
index 14e2b05bc140e..81eb82a522389 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle
+++ b/x-pack/plugin/snapshot-repo-test-kit/qa/hdfs/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle
index 313a11f8ce431..33398d5b8064b 100644
--- a/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle
+++ b/x-pack/plugin/snapshot-repo-test-kit/qa/s3/build.gradle
@@ -5,8 +5,6 @@
  * 2.0.
  */

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.internal-java-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/plugin/spatial/build.gradle b/x-pack/plugin/spatial/build.gradle
index 4304bae5b9991..6299908f0dc14 100644
--- a/x-pack/plugin/spatial/build.gradle
+++ b/x-pack/plugin/spatial/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'
diff --git a/x-pack/plugin/sql/build.gradle b/x-pack/plugin/sql/build.gradle
index d1dcbc3adbd95..69468bf574956 100644
--- a/x-pack/plugin/sql/build.gradle
+++ b/x-pack/plugin/sql/build.gradle
@@ -1,8 +1,13 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.internal-cluster-test'

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 esplugin {
   name = 'x-pack-sql'
   description 'The Elasticsearch plugin that powers SQL for Elasticsearch'
diff --git a/x-pack/plugin/sql/qa/jdbc/build.gradle b/x-pack/plugin/sql/qa/jdbc/build.gradle
index a444399ed28ce..e93d3b72f1de9 100644
--- a/x-pack/plugin/sql/qa/jdbc/build.gradle
+++ b/x-pack/plugin/sql/qa/jdbc/build.gradle
@@ -1,7 +1,13 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.internal.BwcVersions.UnreleasedVersionInfo
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 description = 'Integration tests for SQL JDBC driver'
diff --git a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle
index 971c7bf319244..1637cad33c76d 100644
--- a/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle
+++ b/x-pack/plugin/sql/qa/jdbc/security/with-ssl/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.test-with-ssl'
diff --git a/x-pack/plugin/sql/qa/mixed-node/build.gradle b/x-pack/plugin/sql/qa/mixed-node/build.gradle
index 06e3b61d5b303..35600fda0eb33 100644
--- a/x-pack/plugin/sql/qa/mixed-node/build.gradle
+++ b/x-pack/plugin/sql/qa/mixed-node/build.gradle
@@ -1,10 +1,16 @@
-apply plugin: 'elasticsearch.legacy-java-rest-test'
-apply plugin: 'elasticsearch.bwc-test'
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 import org.elasticsearch.gradle.VersionProperties
-import org.elasticsearch.gradle.internal.info.BuildParams
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

+apply plugin: 'elasticsearch.legacy-java-rest-test'
+apply plugin: 'elasticsearch.bwc-test'
+
 dependencies {
   javaRestTestImplementation project(':x-pack:qa')
   javaRestTestImplementation(project(xpackModule('ql:test-fixtures')))
diff --git a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle
index 51a3f83a909af..0b9c515c48be2 100644
--- a/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle
+++ b/x-pack/plugin/sql/qa/server/security/with-ssl/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.test-with-ssl'
diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle
index cd24dcc15c863..bd8788191cfa2 100644
--- a/x-pack/plugin/sql/sql-cli/build.gradle
+++ b/x-pack/plugin/sql/sql-cli/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 /*
  * This project is named sql-cli because it is in the "org.elasticsearch.plugin"
diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle
index eb0551a4d10e1..b4ee0bee76d9d 100644
--- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle
+++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle
@@ -1,8 +1,14 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.internal.test.RestIntegTestTask
 import org.elasticsearch.gradle.Version
 import org.elasticsearch.gradle.VersionProperties
 import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.internal-testclusters'
 apply plugin: 'elasticsearch.standalone-rest-test'
diff --git a/x-pack/plugin/watcher/qa/rest/build.gradle b/x-pack/plugin/watcher/qa/rest/build.gradle
index 8382a71092720..2d5fc8349b5e0 100644
--- a/x-pack/plugin/watcher/qa/rest/build.gradle
+++ b/x-pack/plugin/watcher/qa/rest/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-java-rest-test'
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
diff --git a/x-pack/plugin/wildcard/build.gradle b/x-pack/plugin/wildcard/build.gradle
index b582f3fcea903..760ad407575d7 100644
--- a/x-pack/plugin/wildcard/build.gradle
+++ b/x-pack/plugin/wildcard/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.internal-es-plugin'
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle
index 8a67a2c1dde0d..65f2282014dc4 100644
--- a/x-pack/qa/core-rest-tests-with-security/build.gradle
+++ b/x-pack/qa/core-rest-tests-with-security/build.gradle
@@ -1,5 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 apply plugin: 'elasticsearch.internal-yaml-rest-test'
-import org.elasticsearch.gradle.internal.info.BuildParams

 dependencies {
   testImplementation project(':x-pack:qa')
diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle
index d6b05242f613b..ee0955c6db082 100644
--- a/x-pack/qa/full-cluster-restart/build.gradle
+++ b/x-pack/qa/full-cluster-restart/build.gradle
@@ -1,4 +1,10 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask

 apply plugin: 'elasticsearch.internal-java-rest-test'
diff --git a/x-pack/qa/mixed-tier-cluster/build.gradle b/x-pack/qa/mixed-tier-cluster/build.gradle
index 79e7d6a655993..bee28c47dc867 100644
--- a/x-pack/qa/mixed-tier-cluster/build.gradle
+++ b/x-pack/qa/mixed-tier-cluster/build.gradle
@@ -1,10 +1,16 @@
-apply plugin: 'elasticsearch.legacy-java-rest-test'
-apply plugin: 'elasticsearch.bwc-test'
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask +apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.bwc-test' + dependencies { javaRestTestImplementation project(':x-pack:qa') } diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle index 9c0648abca21b..83c231da7529c 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-basic-license/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle index ca79bb7ec3825..6e95d718b19de 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-full-license/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle index b9f8369763476..5c6235e092458 100644 --- a/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle +++ b/x-pack/qa/multi-cluster-search-security/legacy-with-restricted-trust/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.test.RestIntegTestTask import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index b53539b224861..43d1cd12cdfb7 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ b/x-pack/qa/oidc-op-tests/build.gradle @@ -1,4 +1,9 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ apply plugin: 'elasticsearch.internal-java-rest-test' diff --git a/x-pack/qa/rolling-upgrade-basic/build.gradle b/x-pack/qa/rolling-upgrade-basic/build.gradle index 09b3b7db7c917..9a447f35eb13c 100644 --- a/x-pack/qa/rolling-upgrade-basic/build.gradle +++ b/x-pack/qa/rolling-upgrade-basic/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle index 0d1cfbd5ff022..ebcb4cd9760fe 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle +++ b/x-pack/qa/rolling-upgrade-multi-cluster/build.gradle @@ -1,4 +1,10 @@ -import org.elasticsearch.gradle.internal.info.BuildParams +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 60fb55e9a2593..2049ccb5d9cc8 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + import org.elasticsearch.gradle.internal.BwcVersions -import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask apply plugin: 'elasticsearch.internal-testclusters' diff --git a/x-pack/qa/smoke-test-plugins-ssl/build.gradle b/x-pack/qa/smoke-test-plugins-ssl/build.gradle index da2d095c001d4..461ebc4beb443 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/build.gradle +++ b/x-pack/qa/smoke-test-plugins-ssl/build.gradle @@ -1,6 +1,11 @@ -import org.apache.tools.ant.filters.ReplaceTokens +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ -import org.elasticsearch.gradle.internal.info.BuildParams +import org.apache.tools.ant.filters.ReplaceTokens apply plugin: 'elasticsearch.legacy-yaml-rest-test' diff --git a/x-pack/qa/smoke-test-plugins/build.gradle b/x-pack/qa/smoke-test-plugins/build.gradle index 427aa39f02e49..a51a67dd75b8a 100644 --- a/x-pack/qa/smoke-test-plugins/build.gradle +++ b/x-pack/qa/smoke-test-plugins/build.gradle @@ -1,5 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
 import org.apache.tools.ant.filters.ReplaceTokens
-import org.elasticsearch.gradle.internal.info.BuildParams

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'
 apply plugin: 'elasticsearch.rest-resources'
diff --git a/x-pack/qa/third-party/jira/build.gradle b/x-pack/qa/third-party/jira/build.gradle
index 626693a8f295f..7e3d0485545a6 100644
--- a/x-pack/qa/third-party/jira/build.gradle
+++ b/x-pack/qa/third-party/jira/build.gradle
@@ -1,10 +1,15 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
 import groovy.json.JsonSlurper

 import javax.net.ssl.HttpsURLConnection
 import java.nio.charset.StandardCharsets

-import org.elasticsearch.gradle.internal.info.BuildParams
-
 apply plugin: 'elasticsearch.legacy-yaml-rest-test'

 dependencies {
diff --git a/x-pack/qa/third-party/pagerduty/build.gradle b/x-pack/qa/third-party/pagerduty/build.gradle
index 86ed67ccbb2d6..20f7b9b654b66 100644
--- a/x-pack/qa/third-party/pagerduty/build.gradle
+++ b/x-pack/qa/third-party/pagerduty/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'

diff --git a/x-pack/qa/third-party/slack/build.gradle b/x-pack/qa/third-party/slack/build.gradle
index ff501a7c99c9b..54821a9d2b71a 100644
--- a/x-pack/qa/third-party/slack/build.gradle
+++ b/x-pack/qa/third-party/slack/build.gradle
@@ -1,4 +1,9 @@
-import org.elasticsearch.gradle.internal.info.BuildParams
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */

 apply plugin: 'elasticsearch.legacy-yaml-rest-test'

From b45564364b42cd32a5817d3cfb2597993d087e9e Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Fri, 22 Nov 2024 07:40:06 -0800
Subject: [PATCH 42/50] Add java version variants of entitlements checker
 (#116878)

As each version of Java is released, there may be additional methods we
want to instrument for entitlements. Since new methods won't exist in the
base version of Java that Elasticsearch is compiled with, we need to have
different classes and compilation for each version.

This commit adds scaffolding for adding the classes for new versions of
Java. Unfortunately it requires several classes in different locations,
but hopefully these are infrequent enough that the boilerplate is ok. We
could consider adding a helper Gradle task to templatize the new classes
in the future if it is too cumbersome.

Note that the example for Java23 does not have anything meaningful in it
yet; it's only meant as an example until we go through the classes and
methods that were added after Java 21.
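For illustration, a hypothetical Java 24 variant would follow the same shape as the Java23 classes added here: a checker interface for the new feature version plus a handle class that obtains the instance through HandleLoader. The Java24* names and the src/main24 source set below are assumptions (nothing in this change defines them), and the feature-version checks in InstrumenterImpl and EntitlementInitialization would each need a new branch as well:

    // Hypothetical sketch: libs/entitlement/bridge/src/main24/java/.../Java24EntitlementChecker.java
    package org.elasticsearch.entitlement.bridge;

    // Extending the previous version's interface (an assumption) would carry its checks forward.
    public interface Java24EntitlementChecker extends Java23EntitlementChecker {}

    // Hypothetical sketch: libs/entitlement/bridge/src/main24/java/.../Java24EntitlementCheckerHandle.java
    package org.elasticsearch.entitlement.bridge;

    public class Java24EntitlementCheckerHandle {

        public static Java24EntitlementChecker instance() {
            return Holder.instance;
        }

        private static class Holder {
            // HandleLoader reflectively asks EntitlementInitialization for the checker
            // and casts it to the requested version-specific interface.
            private static final Java24EntitlementChecker instance = HandleLoader.load(Java24EntitlementChecker.class);
        }

        // no construction
        private Java24EntitlementCheckerHandle() {}
    }

The matching runtime class would extend ElasticsearchEntitlementChecker the same way Java23ElasticsearchEntitlementChecker does below.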
--- .../gradle/internal/MrjarPlugin.java | 10 +++++ .../impl/InstrumenterImpl.java | 24 +++++++---- libs/entitlement/bridge/build.gradle | 17 ++++---- .../bridge/EntitlementCheckerHandle.java | 25 +----------- .../entitlement/bridge/HandleLoader.java | 40 +++++++++++++++++++ .../bridge/Java23EntitlementChecker.java | 12 ++++++ .../Java23EntitlementCheckerHandle.java | 27 +++++++++++++ libs/entitlement/build.gradle | 12 +++++- .../EntitlementInitialization.java | 34 +++++++++++++++- ...Java23ElasticsearchEntitlementChecker.java | 26 ++++++++++++ 10 files changed, 184 insertions(+), 43 deletions(-) create mode 100644 libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java create mode 100644 libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java create mode 100644 libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java create mode 100644 libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java index d1585120b0803..7c488e6e73fee 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/MrjarPlugin.java @@ -21,6 +21,7 @@ import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.tasks.SourceSet; import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.TaskProvider; import org.gradle.api.tasks.compile.CompileOptions; import org.gradle.api.tasks.compile.JavaCompile; import org.gradle.api.tasks.javadoc.Javadoc; @@ -87,6 +88,7 @@ public void apply(Project project) { String mainSourceSetName = SourceSet.MAIN_SOURCE_SET_NAME + javaVersion; SourceSet mainSourceSet = addSourceSet(project, javaExtension, mainSourceSetName, mainSourceSets, javaVersion); configureSourceSetInJar(project, mainSourceSet, javaVersion); + addJar(project, mainSourceSet, javaVersion); mainSourceSets.add(mainSourceSetName); testSourceSets.add(mainSourceSetName); @@ -147,6 +149,14 @@ private SourceSet addSourceSet( return sourceSet; } + private void addJar(Project project, SourceSet sourceSet, int javaVersion) { + project.getConfigurations().register("java" + javaVersion); + TaskProvider jarTask = project.getTasks().register("java" + javaVersion + "Jar", Jar.class, task -> { + task.from(sourceSet.getOutput()); + }); + project.getArtifacts().add("java" + javaVersion, jarTask); + } + private void configurePreviewFeatures(Project project, SourceSet sourceSet, int javaVersion) { project.getTasks().withType(JavaCompile.class).named(sourceSet.getCompileJavaTaskName()).configure(compileTask -> { CompileOptions compileOptions = compileTask.getOptions(); diff --git a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java index 53e76372b107d..dc20b16400f3d 100644 --- a/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java +++ b/libs/entitlement/asm-provider/src/main/java/org/elasticsearch/entitlement/instrumentation/impl/InstrumenterImpl.java @@ -36,6 +36,22 @@ import 
static org.objectweb.asm.Opcodes.INVOKEVIRTUAL; public class InstrumenterImpl implements Instrumenter { + + private static final String checkerClassDescriptor; + private static final String handleClass; + static { + int javaVersion = Runtime.version().feature(); + final String classNamePrefix; + if (javaVersion >= 23) { + classNamePrefix = "Java23"; + } else { + classNamePrefix = ""; + } + String checkerClass = "org/elasticsearch/entitlement/bridge/" + classNamePrefix + "EntitlementChecker"; + handleClass = checkerClass + "Handle"; + checkerClassDescriptor = Type.getObjectType(checkerClass).getDescriptor(); + } + /** * To avoid class name collisions during testing without an agent to replace classes in-place. */ @@ -269,13 +285,7 @@ private void invokeInstrumentationMethod() { } protected void pushEntitlementChecker(MethodVisitor mv) { - mv.visitMethodInsn( - INVOKESTATIC, - "org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle", - "instance", - "()Lorg/elasticsearch/entitlement/bridge/EntitlementChecker;", - false - ); + mv.visitMethodInsn(INVOKESTATIC, handleClass, "instance", "()" + checkerClassDescriptor, false); } public record ClassFileInfo(String fileName, byte[] bytecodes) {} diff --git a/libs/entitlement/bridge/build.gradle b/libs/entitlement/bridge/build.gradle index 3d59dd3eaf33e..a9f8f6e3a3b0a 100644 --- a/libs/entitlement/bridge/build.gradle +++ b/libs/entitlement/bridge/build.gradle @@ -7,19 +7,18 @@ * License v3.0 only", or the "Server Side Public License, v 1". */ +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask + apply plugin: 'elasticsearch.build' +apply plugin: 'elasticsearch.mrjar' -configurations { - bridgeJar { - canBeConsumed = true - canBeResolved = false +tasks.named('jar').configure { + // guarding for intellij + if (sourceSets.findByName("main23")) { + from sourceSets.main23.output } } -artifacts { - bridgeJar(jar) -} - -tasks.named('forbiddenApisMain').configure { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java index 2fe4a163a4136..26c9c83b8eb51 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementCheckerHandle.java @@ -9,9 +9,6 @@ package org.elasticsearch.entitlement.bridge; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; - /** * Makes the {@link EntitlementChecker} available to injected bytecode. */ @@ -35,27 +32,7 @@ private static class Holder { * The {@code EntitlementInitialization} class is what actually instantiates it and makes it available; * here, we copy it into a static final variable for maximum performance. 
*/ - private static final EntitlementChecker instance; - static { - String initClazz = "org.elasticsearch.entitlement.initialization.EntitlementInitialization"; - final Class clazz; - try { - clazz = ClassLoader.getSystemClassLoader().loadClass(initClazz); - } catch (ClassNotFoundException e) { - throw new AssertionError("java.base cannot find entitlement initialziation", e); - } - final Method checkerMethod; - try { - checkerMethod = clazz.getMethod("checker"); - } catch (NoSuchMethodException e) { - throw new AssertionError("EntitlementInitialization is missing checker() method", e); - } - try { - instance = (EntitlementChecker) checkerMethod.invoke(null); - } catch (IllegalAccessException | InvocationTargetException e) { - throw new AssertionError(e); - } - } + private static final EntitlementChecker instance = HandleLoader.load(EntitlementChecker.class); } // no construction diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java new file mode 100644 index 0000000000000..bbfec47884f79 --- /dev/null +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/HandleLoader.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; + +class HandleLoader { + + static T load(Class checkerClass) { + String initClassName = "org.elasticsearch.entitlement.initialization.EntitlementInitialization"; + final Class initClazz; + try { + initClazz = ClassLoader.getSystemClassLoader().loadClass(initClassName); + } catch (ClassNotFoundException e) { + throw new AssertionError("java.base cannot find entitlement initialization", e); + } + final Method checkerMethod; + try { + checkerMethod = initClazz.getMethod("checker"); + } catch (NoSuchMethodException e) { + throw new AssertionError("EntitlementInitialization is missing checker() method", e); + } + try { + return checkerClass.cast(checkerMethod.invoke(null)); + } catch (IllegalAccessException | InvocationTargetException e) { + throw new AssertionError(e); + } + } + + // no instance + private HandleLoader() {} +} diff --git a/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java new file mode 100644 index 0000000000000..244632e80ffa0 --- /dev/null +++ b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementChecker.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +public interface Java23EntitlementChecker extends EntitlementChecker {} diff --git a/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java new file mode 100644 index 0000000000000..f41c5dcdf14fd --- /dev/null +++ b/libs/entitlement/bridge/src/main23/java/org/elasticsearch/entitlement/bridge/Java23EntitlementCheckerHandle.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.bridge; + +/** + * Java23 variant of {@link EntitlementChecker} handle holder. + */ +public class Java23EntitlementCheckerHandle { + + public static Java23EntitlementChecker instance() { + return Holder.instance; + } + + private static class Holder { + private static final Java23EntitlementChecker instance = HandleLoader.load(Java23EntitlementChecker.class); + } + + // no construction + private Java23EntitlementCheckerHandle() {} +} diff --git a/libs/entitlement/build.gradle b/libs/entitlement/build.gradle index 12e0bb48a54b7..841591873153c 100644 --- a/libs/entitlement/build.gradle +++ b/libs/entitlement/build.gradle @@ -6,10 +6,13 @@ * your election, the "Elastic License 2.0", the "GNU Affero General Public * License v3.0 only", or the "Server Side Public License, v 1". 
*/ + +import org.elasticsearch.gradle.internal.precommit.CheckForbiddenApisTask + apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.publish' - apply plugin: 'elasticsearch.embedded-providers' +apply plugin: 'elasticsearch.mrjar' embeddedProviders { impl 'entitlement', project(':libs:entitlement:asm-provider') @@ -23,8 +26,13 @@ dependencies { testImplementation(project(":test:framework")) { exclude group: 'org.elasticsearch', module: 'entitlement' } + + // guarding for intellij + if (sourceSets.findByName("main23")) { + main23CompileOnly project(path: ':libs:entitlement:bridge', configuration: 'java23') + } } -tasks.named('forbiddenApisMain').configure { +tasks.withType(CheckForbiddenApisTask).configureEach { replaceSignatureFiles 'jdk-signatures' } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 6d31abe4cf054..ca57e7b255bca 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -27,6 +27,8 @@ import java.lang.instrument.Instrumentation; import java.lang.module.ModuleFinder; import java.lang.module.ModuleReference; +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardOpenOption; @@ -59,7 +61,7 @@ public static EntitlementChecker checker() { // Note: referenced by agent reflectively public static void initialize(Instrumentation inst) throws Exception { - manager = new ElasticsearchEntitlementChecker(createPolicyManager()); + manager = initChecker(); Map methodMap = INSTRUMENTER_FACTORY.lookupMethodsToInstrument( "org.elasticsearch.entitlement.bridge.EntitlementChecker" @@ -137,6 +139,36 @@ private static Set getModuleNames(Path pluginRoot, boolean isModular) { return Set.of(ALL_UNNAMED); } + private static ElasticsearchEntitlementChecker initChecker() throws IOException { + final PolicyManager policyManager = createPolicyManager(); + + int javaVersion = Runtime.version().feature(); + final String classNamePrefix; + if (javaVersion >= 23) { + classNamePrefix = "Java23"; + } else { + classNamePrefix = ""; + } + final String className = "org.elasticsearch.entitlement.runtime.api." 
+ classNamePrefix + "ElasticsearchEntitlementChecker";
+ Class clazz;
+ try {
+ clazz = Class.forName(className);
+ } catch (ClassNotFoundException e) {
+ throw new AssertionError("entitlement lib cannot find entitlement impl", e);
+ }
+ Constructor constructor;
+ try {
+ constructor = clazz.getConstructor(PolicyManager.class);
+ } catch (NoSuchMethodException e) {
+ throw new AssertionError("entitlement impl is missing a constructor taking PolicyManager", e);
+ }
+ try {
+ return (ElasticsearchEntitlementChecker) constructor.newInstance(policyManager);
+ } catch (IllegalAccessException | InvocationTargetException | InstantiationException e) {
+ throw new AssertionError(e);
+ }
+ }
+
 private static String internalName(Class c) {
 return c.getName().replace('.', '/');
 }
diff --git a/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java
new file mode 100644
index 0000000000000..d0f9f4f48609c
--- /dev/null
+++ b/libs/entitlement/src/main23/java/org/elasticsearch/entitlement/runtime/api/Java23ElasticsearchEntitlementChecker.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the "Elastic License
+ * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
+ * Public License v 1"; you may not use this file except in compliance with, at
+ * your election, the "Elastic License 2.0", the "GNU Affero General Public
+ * License v3.0 only", or the "Server Side Public License, v 1".
+ */
+
+package org.elasticsearch.entitlement.runtime.api;
+
+import org.elasticsearch.entitlement.bridge.Java23EntitlementChecker;
+import org.elasticsearch.entitlement.runtime.policy.PolicyManager;
+
+public class Java23ElasticsearchEntitlementChecker extends ElasticsearchEntitlementChecker implements Java23EntitlementChecker {
+
+ public Java23ElasticsearchEntitlementChecker(PolicyManager policyManager) {
+ super(policyManager);
+ }
+
+ @Override
+ public void check$java_lang_System$exit(Class callerClass, int status) {
+ // TODO: this is just an example, we shouldn't really override a method implemented in the superclass
+ super.check$java_lang_System$exit(callerClass, status);
+ }
+}

From 080359873352660f71265bcfddd532d079cf6ff0 Mon Sep 17 00:00:00 2001
From: Ankita Kumar
Date: Fri, 22 Nov 2024 10:52:15 -0500
Subject: [PATCH 43/50] Update test (#117202)

---
 .../elasticsearch/index/reindex/ReindexNodeShutdownIT.java | 7 ++++---
 muted-tests.yml | 3 ---
 2 files changed, 4 insertions(+), 6 deletions(-)

diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java
index 4a001bb2d0969..a4b030e3c793f 100644
--- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java
+++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/ReindexNodeShutdownIT.java
@@ -35,7 +35,7 @@
  * The test works as follows:
  * 1. Start a large (reasonably long running) reindexing request on the coordinator-only node.
  * 2. Check that the reindexing task appears on the coordinating node
- * 3. With a 10s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING,
+ * 3.
With a 60s timeout value for MAXIMUM_REINDEXING_TIMEOUT_SETTING, * wait for the reindexing task to complete before closing the node * 4. Confirm that the reindexing task succeeds with the wait (it will fail without it) */ @@ -58,8 +58,9 @@ public void testReindexWithShutdown() throws Exception { final String masterNodeName = internalCluster().startMasterOnlyNode(); final String dataNodeName = internalCluster().startDataOnlyNode(); + /* Maximum time to wait for reindexing tasks to complete before shutdown */ final Settings COORD_SETTINGS = Settings.builder() - .put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(10)) + .put(MAXIMUM_REINDEXING_TIMEOUT_SETTING.getKey(), TimeValue.timeValueSeconds(60)) .build(); final String coordNodeName = internalCluster().startCoordinatingOnlyNode(Settings.EMPTY); @@ -118,7 +119,7 @@ public void onFailure(Exception e) { internalCluster().stopNode(coordNodeName); } - // Make sure all documents from the source index have been reindexed into the destination index + // Make sure all documents from the source index have been re-indexed into the destination index private void checkDestinationIndex(String dataNodeName, int numDocs) throws Exception { assertTrue(indexExists(DEST_INDEX)); flushAndRefresh(DEST_INDEX); diff --git a/muted-tests.yml b/muted-tests.yml index fcc3b6b6e9b12..f5cf062d519ea 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -127,9 +127,6 @@ tests: - class: org.elasticsearch.search.SearchServiceTests method: testParseSourceValidation issue: https://github.com/elastic/elasticsearch/issues/115936 -- class: org.elasticsearch.index.reindex.ReindexNodeShutdownIT - method: testReindexWithShutdown - issue: https://github.com/elastic/elasticsearch/issues/115996 - class: org.elasticsearch.search.query.SearchQueryIT method: testAllDocsQueryString issue: https://github.com/elastic/elasticsearch/issues/115728 From 94c3e8226bcf559ca93752bd9cb4a0eb6cc08361 Mon Sep 17 00:00:00 2001 From: Ying Mao Date: Fri, 22 Nov 2024 11:10:34 -0500 Subject: [PATCH 44/50] Fixing bug setting index when parsing Google Vertex AI results (#117287) * Using record ID as index value when parsing Google Vertex AI rerank results * Update docs/changelog/117287.yaml * PR feedback --- docs/changelog/117287.yaml | 5 +++ .../GoogleVertexAiRerankResponseEntity.java | 28 ++++++++++++-- ...ogleVertexAiRerankResponseEntityTests.java | 37 ++++++++++++++++++- 3 files changed, 65 insertions(+), 5 deletions(-) create mode 100644 docs/changelog/117287.yaml diff --git a/docs/changelog/117287.yaml b/docs/changelog/117287.yaml new file mode 100644 index 0000000000000..08da9dd8087b2 --- /dev/null +++ b/docs/changelog/117287.yaml @@ -0,0 +1,5 @@ +pr: 117287 +summary: Fixing bug setting index when parsing Google Vertex AI results +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java index 24946ee5875a5..78673277797d2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntity.java @@ -30,6 +30,8 @@ public class 
GoogleVertexAiRerankResponseEntity { private static final String FAILED_TO_FIND_FIELD_TEMPLATE = "Failed to find required field [%s] in Google Vertex AI rerank response"; + private static final String INVALID_ID_FIELD_FORMAT_TEMPLATE = "Expected numeric value for record ID field in Google Vertex AI rerank " + + "response but received [%s]"; /** * Parses the Google Vertex AI rerank response. @@ -109,14 +111,27 @@ private static List doParse(XContentParser parser) throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.SCORE.getPreferredName())); } - return new RankedDocsResults.RankedDoc(index, parsedRankedDoc.score, parsedRankedDoc.content); + if (parsedRankedDoc.id == null) { + throw new IllegalStateException(format(FAILED_TO_FIND_FIELD_TEMPLATE, RankedDoc.ID.getPreferredName())); + } + + try { + return new RankedDocsResults.RankedDoc( + Integer.parseInt(parsedRankedDoc.id), + parsedRankedDoc.score, + parsedRankedDoc.content + ); + } catch (NumberFormatException e) { + throw new IllegalStateException(format(INVALID_ID_FIELD_FORMAT_TEMPLATE, parsedRankedDoc.id)); + } }); } - private record RankedDoc(@Nullable Float score, @Nullable String content) { + private record RankedDoc(@Nullable Float score, @Nullable String content, @Nullable String id) { private static final ParseField CONTENT = new ParseField("content"); private static final ParseField SCORE = new ParseField("score"); + private static final ParseField ID = new ParseField("id"); private static final ObjectParser PARSER = new ObjectParser<>( "google_vertex_ai_rerank_response", true, @@ -126,6 +141,7 @@ private record RankedDoc(@Nullable Float score, @Nullable String content) { static { PARSER.declareString(Builder::setContent, CONTENT); PARSER.declareFloat(Builder::setScore, SCORE); + PARSER.declareString(Builder::setId, ID); } public static RankedDoc parse(XContentParser parser) { @@ -137,6 +153,7 @@ private static final class Builder { private String content; private Float score; + private String id; private Builder() {} @@ -150,8 +167,13 @@ public Builder setContent(String content) { return this; } + public Builder setId(String id) { + this.id = id; + return this; + } + public RankedDoc build() { - return new RankedDoc(score, content); + return new RankedDoc(score, content, id); } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java index 32450e3facfd0..7ff79e2618425 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/googlevertexai/GoogleVertexAiRerankResponseEntityTests.java @@ -39,7 +39,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); - assertThat(parsedResults.getRankedDocs(), is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2")))); + assertThat(parsedResults.getRankedDocs(), is(List.of(new RankedDocsResults.RankedDoc(2, 0.97F, "content 2")))); } public void testFromResponse_CreatesResultsForMultipleItems() throws IOException { @@ -68,7 +68,7 @@ public void 
testFromResponse_CreatesResultsForMultipleItems() throws IOException

 assertThat(
 parsedResults.getRankedDocs(),
- is(List.of(new RankedDocsResults.RankedDoc(0, 0.97F, "content 2"), new RankedDocsResults.RankedDoc(1, 0.90F, "content 1")))
+ is(List.of(new RankedDocsResults.RankedDoc(2, 0.97F, "content 2"), new RankedDocsResults.RankedDoc(1, 0.90F, "content 1")))
 );
 }

@@ -161,4 +161,37 @@ public void testFromResponse_FailsWhenScoreFieldIsNotPresent() {

 assertThat(thrownException.getMessage(), is("Failed to find required field [score] in Google Vertex AI rerank response"));
 }
+
+ public void testFromResponse_FailsWhenIDFieldIsNotInteger() {
+ String responseJson = """
+ {
+ "records": [
+ {
+ "id": "abcd",
+ "title": "title 2",
+ "content": "content 2",
+ "score": 0.97
+ },
+ {
+ "id": "1",
+ "title": "title 1",
+ "content": "content 1",
+ "score": 0.96
+ }
+ ]
+ }
+ """;
+
+ var thrownException = expectThrows(
+ IllegalStateException.class,
+ () -> GoogleVertexAiRerankResponseEntity.fromResponse(
+ new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8))
+ )
+ );
+
+ assertThat(
+ thrownException.getMessage(),
+ is("Expected numeric value for record ID field in Google Vertex AI rerank response but received [abcd]")
+ );
+ }
 }

From 893dfd3c9aa3ac8c11d56e063f30ca7acea733bd Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Fri, 22 Nov 2024 11:28:06 -0500
Subject: [PATCH 45/50] ESQL: Make WEIGHTED_AVG not preview (#117356)

It's not PREVIEW.

---
 docs/reference/esql/functions/aggregation-functions.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/esql/functions/aggregation-functions.asciidoc b/docs/reference/esql/functions/aggregation-functions.asciidoc
index 7cdc42ea6cbf9..3a27e1944a684 100644
--- a/docs/reference/esql/functions/aggregation-functions.asciidoc
+++ b/docs/reference/esql/functions/aggregation-functions.asciidoc
@@ -20,7 +20,7 @@ The <> command supports these aggregate functions:
 * <>
 * <>
 * <>
-* experimental:[] <>
+* <>
 // end::agg_list[]

 include::layout/avg.asciidoc[]

From 9b4c89d50763984ce1059d6cc1412336c7b7c3de Mon Sep 17 00:00:00 2001
From: Mark Vieira
Date: Fri, 22 Nov 2024 09:04:35 -0800
Subject: [PATCH 46/50] Don't run the DRA staging build on the 8.x branch
 (#117355)

---
 .buildkite/scripts/dra-workflow.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh
index 81b8225e443a4..f2dc40ca1927f 100755
--- a/.buildkite/scripts/dra-workflow.sh
+++ b/.buildkite/scripts/dra-workflow.sh
@@ -6,7 +6,7 @@ WORKFLOW="${DRA_WORKFLOW:-snapshot}"
 BRANCH="${BUILDKITE_BRANCH:-}"

 # Don't publish main branch to staging
-if [[ "$BRANCH" == "main" && "$WORKFLOW" == "staging" ]]; then
+if [[ ("$BRANCH" == "main" || "$BRANCH" == *.x) && "$WORKFLOW" == "staging" ]]; then
   exit 0
 fi

From f325c1541088995f35e7d39cf181a9b970d3c90a Mon Sep 17 00:00:00 2001
From: Stanislav Malyshev
Date: Fri, 22 Nov 2024 11:00:26 -0700
Subject: [PATCH 47/50] Fix async search tests - do not warn on the presence
 of .async-search (#117301)

---
 muted-tests.yml | 3 ---
 .../test/rest/ESRestTestCase.java | 25 +++++++++++++++++++
 2 files changed, 25 insertions(+), 3 deletions(-)

diff --git a/muted-tests.yml b/muted-tests.yml
index fcc3b6b6e9b12..c4af8bf1c0d21 100644
--- a/muted-tests.yml
+++ b/muted-tests.yml
@@ -214,9 +214,6 @@ tests:
 - class: org.elasticsearch.upgrades.QueryBuilderBWCIT
   method: testQueryBuilderBWC {cluster=UPGRADED}
   issue:
https://github.com/elastic/elasticsearch/issues/116990
-- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT
-  method: test {yaml=reference/esql/esql-across-clusters/line_197}
-  issue: https://github.com/elastic/elasticsearch/issues/117099
 - class: org.elasticsearch.xpack.apmdata.APMYamlTestSuiteIT
   method: test {yaml=/10_apm/Test template reinstallation}
   issue: https://github.com/elastic/elasticsearch/issues/116445

diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
index dd08107bd67fb..a4195a07e7621 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java
@@ -1131,6 +1131,7 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE
 }
 final Request deleteRequest = new Request("DELETE", Strings.collectionToCommaDelimitedString(indexPatterns));
 deleteRequest.addParameter("expand_wildcards", "open,closed,hidden");
+ deleteRequest.setOptions(deleteRequest.getOptions().toBuilder().setWarningsHandler(ignoreAsyncSearchWarning()).build());
 final Response response = adminClient().performRequest(deleteRequest);
 try (InputStream is = response.getEntity().getContent()) {
 assertTrue((boolean) XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true).get("acknowledged"));
@@ -1143,6 +1144,30 @@ protected static void wipeAllIndices(boolean preserveSecurityIndices) throws IOE
 }
 }

+ // Make warnings handler that ignores the .async-search warning since .async-search may randomly appear when async requests are slow
+ // See: https://github.com/elastic/elasticsearch/issues/117099
+ protected static WarningsHandler ignoreAsyncSearchWarning() {
+ return new WarningsHandler() {
+ @Override
+ public boolean warningsShouldFailRequest(List warnings) {
+ if (warnings.isEmpty()) {
+ return false;
+ }
+ return warnings.equals(
+ List.of(
+ "this request accesses system indices: [.async-search], "
+ + "but in a future major version, direct access to system indices will be prevented by default"
+ )
+ ) == false;
+ }
+
+ @Override
+ public String toString() {
+ return "ignore .async-search warning";
+ }
+ };
+ }
+
 protected static void wipeDataStreams() throws IOException {
 try {
 if (hasXPack()) {

From dae59da5f969a57fc18172c605914591902381a2 Mon Sep 17 00:00:00 2001
From: Oleksandr Kolomiiets
Date: Fri, 22 Nov 2024 10:15:54 -0800
Subject: [PATCH 48/50] Fix constant_keyword test run and properly test recent
 behavior change (#117284)

---
 .../index/mapper/MapperFeatures.java | 7 ++++-
 .../mapper-constant-keyword/build.gradle | 2 +-
 .../ConstantKeywordClientYamlTestSuiteIT.java | 10 +++++++
 .../test/20_synthetic_source.yml | 26 +++++++++++++++++--
 4 files changed, 41 insertions(+), 4 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java
index 5743baeec536d..333c37381c587 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java
@@ -57,6 +57,10 @@ public Set getFeatures() {
 );
 }

+ public static final NodeFeature CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX = new NodeFeature(
+ "mapper.constant_keyword.synthetic_source_write_fix"
+ );
+
 @Override
 public Set getTestFeatures() {
 return Set.of(
@@ -66,7 +70,8 @@ public Set
getTestFeatures() { SourceFieldMapper.SOURCE_MODE_FROM_INDEX_SETTING, IgnoredSourceFieldMapper.IGNORED_SOURCE_AS_TOP_LEVEL_METADATA_ARRAY_FIELD, IgnoredSourceFieldMapper.ALWAYS_STORE_OBJECT_ARRAYS_IN_NESTED_OBJECTS, - MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT + MapperService.LOGSDB_DEFAULT_IGNORE_DYNAMIC_BEYOND_LIMIT, + CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX ); } } diff --git a/x-pack/plugin/mapper-constant-keyword/build.gradle b/x-pack/plugin/mapper-constant-keyword/build.gradle index 4f50246450f3f..c1e0eb61b611b 100644 --- a/x-pack/plugin/mapper-constant-keyword/build.gradle +++ b/x-pack/plugin/mapper-constant-keyword/build.gradle @@ -6,7 +6,7 @@ */ apply plugin: 'elasticsearch.internal-es-plugin' -apply plugin: 'elasticsearch.legacy-yaml-rest-test' +apply plugin: 'elasticsearch.internal-yaml-rest-test' esplugin { name 'constant-keyword' diff --git a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java index 789059d9e11c0..5b6048b481abf 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java +++ b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/java/org/elasticsearch/xpack/constantkeyword/ConstantKeywordClientYamlTestSuiteIT.java @@ -10,8 +10,10 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.junit.ClassRule; /** Runs yaml rest tests */ public class ConstantKeywordClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -24,4 +26,12 @@ public ConstantKeywordClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidat public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local().module("constant-keyword").build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml index d40f69f483dbb..012b1006b8d20 100644 --- a/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml +++ b/x-pack/plugin/mapper-constant-keyword/src/yamlRestTest/resources/rest-api-spec/test/20_synthetic_source.yml @@ -1,7 +1,7 @@ constant_keyword: - requires: - cluster_features: [ "mapper.source.mode_from_index_setting" ] - reason: "Source mode configured through index setting" + cluster_features: [ "mapper.constant_keyword.synthetic_source_write_fix" ] + reason: "Behavior fix" - do: indices.create: @@ -26,6 +26,15 @@ constant_keyword: body: kwd: foo + - do: + index: + index: test + id: 2 + refresh: true + body: + kwd: foo + const_kwd: bar + - do: search: index: test @@ -33,6 +42,19 @@ constant_keyword: query: ids: values: [1] + + - match: + hits.hits.0._source: + kwd: foo + + - do: + search: + index: test + body: + query: 
+ ids: + values: [2] + - match: hits.hits.0._source: kwd: foo From f3eb27e234568699020c720f53fb33084030dbc9 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Fri, 22 Nov 2024 20:48:51 +0200 Subject: [PATCH 49/50] ESQL: use field_caps native nested fields filtering (#117201) * Just filter the nested fields natively with field_caps support --------- Co-authored-by: Elastic Machine Co-authored-by: Craig Taverner --- docs/changelog/117201.yaml | 6 + .../esql/qa/rest/FieldExtractorTestCase.java | 318 ++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 7 +- .../xpack/esql/session/IndexResolver.java | 25 +- 4 files changed, 334 insertions(+), 22 deletions(-) create mode 100644 docs/changelog/117201.yaml diff --git a/docs/changelog/117201.yaml b/docs/changelog/117201.yaml new file mode 100644 index 0000000000000..f8a2be35c70a3 --- /dev/null +++ b/docs/changelog/117201.yaml @@ -0,0 +1,6 @@ +pr: 117201 +summary: "Use `field_caps` native nested fields filtering" +area: ES|QL +type: bug +issues: + - 117054 diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java index 6f45c9d92fd12..813354db697e1 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/FieldExtractorTestCase.java @@ -28,6 +28,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.hamcrest.Matcher; import org.junit.Before; @@ -1107,6 +1108,323 @@ public void testTypeConflictInObject() throws IOException { ); } + /** + * Test for https://github.com/elastic/elasticsearch/issues/117054 fix + */ + public void testOneNestedSubField_AndSameNameSupportedField() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + + Map result = runEsql("FROM test"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", Collections.EMPTY_LIST) + ); + + index("test", """ + {"Responses.process.pid": 123,"process.parent.command_line":"run.bat"}"""); + + result = runEsql("FROM test"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + result = runEsql(""" + FROM test | where process.parent.command_line == "run.bat" + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) 
+ ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test | SORT Responses.process.pid")); + String err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 1:18: Unknown column [Responses.process.pid]")); + + e = expectThrows(ResponseException.class, () -> runEsql(""" + FROM test + | SORT Responses.process.pid + | WHERE Responses.process IS NULL + """)); + err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 2:8: Unknown column [Responses.process.pid]")); + } + + public void testOneNestedSubField_AndSameNameSupportedField_TwoIndices() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test1", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + } + } + """); + ESRestTestCase.createIndex("test2", Settings.EMPTY, """ + "properties": { + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + index("test1", """ + {"Responses.process.pid": 123}"""); + index("test2", """ + {"process.parent.command_line":"run.bat"}"""); + + Map result = runEsql("FROM test* | SORT process.parent.command_line ASC NULLS FIRST"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item(null).item(null), matchesList().item("run.bat").item("run.bat"))) + ); + + result = runEsql(""" + FROM test* | where process.parent.command_line == "run.bat" + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of(columnInfo("process.parent.command_line", "keyword"), columnInfo("process.parent.command_line.text", "text")) + ).entry("values", List.of(matchesList().item("run.bat").item("run.bat"))) + ); + + ResponseException e = expectThrows(ResponseException.class, () -> runEsql("FROM test* | SORT Responses.process.pid")); + String err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 1:19: Unknown column [Responses.process.pid]")); + + e = expectThrows(ResponseException.class, () -> runEsql(""" + FROM test* + | SORT Responses.process.pid + | WHERE Responses.process IS NULL + """)); + err = EntityUtils.toString(e.getResponse().getEntity()); + assertThat(err, containsString("line 2:8: Unknown column [Responses.process.pid]")); + } + + public void testOneNestedField_AndSameNameSupportedField_TwoIndices() throws IOException { + assumeIndexResolverNestedFieldsNameClashFixed(); + ESRestTestCase.createIndex("test1", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "nested", + "properties": { + "pid": { + "type": "long" + } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + ESRestTestCase.createIndex("test2", Settings.EMPTY, """ + "properties": { + "Responses": { + "properties": { + "process": { + "type": "integer", + "fields": { + "pid": { + "type": "long" 
+ } + } + } + } + }, + "process": { + "properties": { + "parent": { + "properties": { + "command_line": { + "type": "wildcard", + "fields": { + "text": { + "type": "text" + } + } + } + } + } + } + } + } + """); + index("test1", """ + {"Responses.process.pid": 111,"process.parent.command_line":"run1.bat"}"""); + index("test2", """ + {"Responses.process": 222,"process.parent.command_line":"run2.bat"}"""); + + Map result = runEsql("FROM test* | SORT process.parent.command_line"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ) + .entry( + "values", + List.of( + matchesList().item(null).item(null).item("run1.bat").item("run1.bat"), + matchesList().item(222).item(222).item("run2.bat").item("run2.bat") + ) + ) + ); + + result = runEsql(""" + FROM test* | where Responses.process.pid == 111 + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ).entry("values", List.of()) + ); + + result = runEsql("FROM test* | SORT process.parent.command_line"); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ) + .entry( + "values", + List.of( + matchesList().item(null).item(null).item("run1.bat").item("run1.bat"), + matchesList().item(222).item(222).item("run2.bat").item("run2.bat") + ) + ) + ); + + result = runEsql(""" + FROM test* + | SORT process.parent.command_line + | WHERE Responses.process IS NULL + """); + assertMap( + result, + matchesMapWithOptionalTook(result.get("took")).entry( + "columns", + List.of( + columnInfo("Responses.process", "integer"), + columnInfo("Responses.process.pid", "long"), + columnInfo("process.parent.command_line", "keyword"), + columnInfo("process.parent.command_line.text", "text") + ) + ).entry("values", List.of(matchesList().item(null).item(null).item("run1.bat").item("run1.bat"))) + ); + } + + private void assumeIndexResolverNestedFieldsNameClashFixed() throws IOException { + // especially for BWC tests but also for regular tests + var capsName = EsqlCapabilities.Cap.FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER.name().toLowerCase(Locale.ROOT); + boolean requiredClusterCapability = clusterHasCapability("POST", "/_query", List.of(), List.of(capsName)).orElse(false); + assumeTrue( + "This test makes sense for versions that have the fix for https://github.com/elastic/elasticsearch/issues/117054", + requiredClusterCapability + ); + } + private CheckedConsumer empNoInObject(String empNoType) { return index -> { index.startObject("properties"); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index c33acf95aa33f..54b02c87b285b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -511,7 +511,12 @@ public enum Cap { /** * LOOKUP JOIN */ - JOIN_LOOKUP(Build.current().isSnapshot()); + JOIN_LOOKUP(Build.current().isSnapshot()), + + /** + * Fix for https://github.com/elastic/elasticsearch/issues/117054 + */ + FIX_NESTED_FIELDS_NAME_CLASH_IN_INDEXRESOLVER; private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java index 0be8cf820d345..f61be4b59830e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/IndexResolver.java @@ -98,9 +98,8 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp // TODO flattened is simpler - could we get away with that? String[] names = fieldsCaps.keySet().toArray(new String[0]); Arrays.sort(names); - Set forbiddenFields = new HashSet<>(); Map rootFields = new HashMap<>(); - name: for (String name : names) { + for (String name : names) { Map fields = rootFields; String fullName = name; boolean isAlias = false; @@ -111,9 +110,6 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp break; } String parent = name.substring(0, nextDot); - if (forbiddenFields.contains(parent)) { - continue name; - } EsField obj = fields.get(parent); if (obj == null) { obj = new EsField(parent, OBJECT, new HashMap<>(), false, true); @@ -125,16 +121,10 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp fields = obj.getProperties(); name = name.substring(nextDot + 1); } - - List caps = fieldsCaps.get(fullName); - if (allNested(caps)) { - forbiddenFields.add(name); - continue; - } // TODO we're careful to make isAlias match IndexResolver - but do we use it? EsField field = firstUnsupportedParent == null - ? createField(fieldCapsResponse, name, fullName, caps, isAlias) + ? createField(fieldCapsResponse, name, fullName, fieldsCaps.get(fullName), isAlias) : new UnsupportedEsField( fullName, firstUnsupportedParent.getOriginalType(), @@ -164,15 +154,6 @@ public IndexResolution mergedMappings(String indexPattern, FieldCapabilitiesResp return IndexResolution.valid(new EsIndex(indexPattern, rootFields, concreteIndices), concreteIndices.keySet(), unavailableRemotes); } - private boolean allNested(List caps) { - for (IndexFieldCapabilities cap : caps) { - if (false == cap.type().equalsIgnoreCase("nested")) { - return false; - } - } - return true; - } - private static Map> collectFieldCaps(FieldCapabilitiesResponse fieldCapsResponse) { Set seenHashes = new HashSet<>(); Map> fieldsCaps = new HashMap<>(); @@ -278,6 +259,8 @@ private static FieldCapabilitiesRequest createFieldCapsRequest(String index, Set // lenient because we throw our own errors looking at the response e.g. 
         // lenient because we throw our own errors looking at the response e.g. if something was not resolved
         // also because this way security doesn't throw authorization exceptions but rather honors ignore_unavailable
         req.indicesOptions(FIELD_CAPS_INDICES_OPTIONS);
+        // we ignore the nested data type fields starting with https://github.com/elastic/elasticsearch/pull/111495
+        req.filters("-nested");
         req.setMergeResults(false);
         return req;
     }
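
The new `req.filters("-nested")` call pushes the nested-type exclusion down into the
field_caps request itself, which is what makes the allNested()/forbiddenFields
post-processing of the response removable. A minimal sketch of that request shape,
assuming only the public FieldCapabilitiesRequest API; the wrapper class and index
pattern are illustrative, not part of the patch:

    import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;

    class NestedFreeFieldCapsSketch {
        // Build a field_caps request that asks the server to drop fields
        // mapped with the `nested` data type, mirroring the change above.
        static FieldCapabilitiesRequest build(String indexPattern) {
            FieldCapabilitiesRequest req = new FieldCapabilitiesRequest();
            req.indices(indexPattern); // e.g. "test*"
            req.fields("*");           // request every field...
            req.filters("-nested");    // ...except the nested-typed ones
            return req;
        }
    }

With the server-side filter, nested fields never reach the resolver at all, so no
per-field bookkeeping is needed on the client.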
.put("data_partitioning", "shard") @@ -444,6 +456,7 @@ public void testTaskContentsForTopNQuery() throws Exception { ActionFuture response = startEsql("from test | sort pause_me | keep pause_me"); try { getTasksStarting(); + logger.info("unblocking script"); scriptPermits.release(pageSize()); getTasksRunning(); } finally { @@ -455,7 +468,6 @@ public void testTaskContentsForTopNQuery() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/107293") public void testTaskContentsForLimitQuery() throws Exception { String limit = Integer.toString(randomIntBetween(pageSize() + 1, 2 * numberOfDocs())); READ_DESCRIPTION = """ @@ -475,7 +487,8 @@ public void testTaskContentsForLimitQuery() throws Exception { ActionFuture response = startEsql("from test | keep pause_me | limit " + limit); try { getTasksStarting(); - scriptPermits.release(pageSize()); + logger.info("unblocking script"); + scriptPermits.release(pageSize() - prereleasedDocs); getTasksRunning(); } finally { scriptPermits.release(numberOfDocs()); @@ -504,6 +517,7 @@ public void testTaskContentsForGroupingStatsQuery() throws Exception { ActionFuture response = startEsql("from test | stats max(foo) by pause_me"); try { getTasksStarting(); + logger.info("unblocking script"); scriptPermits.release(pageSize()); getTasksRunning(); } finally {