From e6ad3559796f9eb0238199a4ceb70dd3352195f0 Mon Sep 17 00:00:00 2001 From: Ticheng Lin Date: Fri, 11 Aug 2023 03:09:06 +0000 Subject: [PATCH 1/9] Add support for query profiler with concurrent aggregation (#9248) Signed-off-by: Ticheng Lin --- CHANGELOG.md | 1 + .../search/internal/ContextIndexSearcher.java | 4 + .../profile/AbstractInternalProfileTree.java | 5 +- .../profile/AbstractProfileBreakdown.java | 15 +- .../profile/ContextualProfileBreakdown.java | 8 + .../ConcurrentQueryProfileBreakdown.java | 192 +++++++++++++++++- .../search/profile/query/ProfileWeight.java | 8 + .../ConcurrentQueryProfileBreakdownTests.java | 158 ++++++++++++++ 8 files changed, 385 insertions(+), 6 deletions(-) create mode 100644 server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java diff --git a/CHANGELOG.md b/CHANGELOG.md index cc961df02dc8b..3d282bd2e6348 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -180,6 +180,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Use non-concurrent path for sort request on timeseries index and field([#9562](https://github.com/opensearch-project/OpenSearch/pull/9562)) - Added sampler based on `Blanket Probabilistic Sampling rate` and `Override for on demand` ([#9621](https://github.com/opensearch-project/OpenSearch/issues/9621)) - [Remote Store] Add support for Remote Translog Store stats in `_remotestore/stats/` API ([#9263](https://github.com/opensearch-project/OpenSearch/pull/9263)) +- Add support for query profiler with concurrent aggregation ([#9248](https://github.com/opensearch-project/OpenSearch/pull/9248)) ### Deprecated diff --git a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java index 62e85fd93de60..acd61f1a94679 100644 --- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java +++ 
b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java @@ -299,6 +299,10 @@ private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collecto final LeafCollector leafCollector; try { cancellable.checkCancelled(); + if (weight instanceof ProfileWeight) { + ((ProfileWeight) weight).associateLeafToCollector(ctx, collector); + ((ProfileWeight) weight).buildCollectorList(collector); + } weight = wrapWeight(weight); // See please https://github.com/apache/lucene/pull/964 collector.setWeight(weight); diff --git a/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java b/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java index 4d0949624ebed..ea93a80a4d552 100644 --- a/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java +++ b/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java @@ -186,7 +186,10 @@ private ProfileResult doGetTree(int token) { breakdown.toBreakdownMap(), breakdown.toDebugMap(), breakdown.toNodeTime(), - childrenProfileResults + childrenProfileResults, + breakdown.getMaxSliceNodeTime(), + breakdown.getMinSliceNodeTime(), + breakdown.getAvgSliceNodeTime() ); } diff --git a/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java index 67ab062c0e3ca..e7966aa90425c 100644 --- a/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java @@ -80,6 +80,7 @@ public Map toBreakdownMap() { for (T timingType : this.timingTypes) { map.put(timingType.toString(), this.timings[timingType.ordinal()].getApproximateTiming()); map.put(timingType + TIMING_TYPE_COUNT_SUFFIX, this.timings[timingType.ordinal()].getCount()); + map.put(timingType + TIMING_TYPE_START_TIME_SUFFIX, 
this.timings[timingType.ordinal()].getEarliestTimerStartTime()); } return Collections.unmodifiableMap(map); } @@ -91,11 +92,23 @@ protected Map toDebugMap() { return emptyMap(); } - public final long toNodeTime() { + public long toNodeTime() { long total = 0; for (T timingType : timingTypes) { total += timings[timingType.ordinal()].getApproximateTiming(); } return total; } + + public Long getMaxSliceNodeTime() { + return null; + } + + public Long getMinSliceNodeTime() { + return null; + } + + public Long getAvgSliceNodeTime() { + return null; + } } diff --git a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java index 4f071f730cd45..d3e5c8089aeea 100644 --- a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java @@ -8,6 +8,9 @@ package org.opensearch.search.profile; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; + /** * Provide contextual profile breakdowns which are associated with freestyle context. Used when concurrent * search over segments is activated and each collector needs own non-shareable profile breakdown instance. 
@@ -15,6 +18,7 @@ * @opensearch.internal */ public abstract class ContextualProfileBreakdown> extends AbstractProfileBreakdown { + public ContextualProfileBreakdown(Class clazz) { super(clazz); } @@ -25,4 +29,8 @@ public ContextualProfileBreakdown(Class clazz) { * @return contextual profile breakdown instance */ public abstract AbstractProfileBreakdown context(Object context); + + public void associateLeafToCollector(LeafReaderContext leaf, Collector collector) {} + + public void buildCollectorList(Collector collector) {} } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index 6f0c78e8b307d..2d4059d104bcb 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -8,10 +8,14 @@ package org.opensearch.search.profile.query; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; import org.opensearch.search.profile.AbstractProfileBreakdown; import org.opensearch.search.profile.ContextualProfileBreakdown; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -23,7 +27,22 @@ * @opensearch.internal */ public final class ConcurrentQueryProfileBreakdown extends ContextualProfileBreakdown { + public static final String MAX_END_TIME_SUFFIX = "_max_end_time"; + public static final String MIN_START_TIME_SUFFIX = "_min_start_time"; + private static final String AT = "@"; + private static final String LATEST_LEAF_END_TIME = "latest_leaf_end_time"; + private static final String MAX_PREFIX = "max_"; + private static final String MIN_PREFIX = "min_"; + private static final String AVG_PREFIX = "avg_"; + private static long breakdownMaxEndTime = 0; 
+ private static long breakdownMinStartTime = Long.MAX_VALUE; + private static long maxSliceNodeTime = Long.MIN_VALUE; + private static long minSliceNodeTime = Long.MAX_VALUE; + private static long avgSliceNodeTime = 0L; + private static long nodeTimeSum = 0L; private final Map> contexts = new ConcurrentHashMap<>(); + private List collectorList = new ArrayList<>(); + private Map leafToCollector = new HashMap<>(); /** Sole constructor. */ public ConcurrentQueryProfileBreakdown() { @@ -46,12 +65,177 @@ public AbstractProfileBreakdown context(Object context) { public Map toBreakdownMap() { final Map map = new HashMap<>(super.toBreakdownMap()); - for (final AbstractProfileBreakdown context : contexts.values()) { - for (final Map.Entry entry : context.toBreakdownMap().entrySet()) { - map.merge(entry.getKey(), entry.getValue(), Long::sum); + for (final Map.Entry> context : contexts.entrySet()) { + Map breakdown = context.getValue().toBreakdownMap(); + String collector = leafToCollector.get(context.getKey()); + for (final Map.Entry entry : breakdown.entrySet()) { + String breakdownType = entry.getKey(); + Long breakdownValue = entry.getValue(); + // Adding breakdown type count + if (breakdownType.contains(TIMING_TYPE_COUNT_SUFFIX)) { + map.merge(breakdownType, breakdownValue, Long::sum); + } + // Adding slice level breakdown + if (!breakdownType.contains(TIMING_TYPE_START_TIME_SUFFIX)) { + String sliceLevelBreakdownType = breakdownType + AT + collector; + map.put(sliceLevelBreakdownType, map.getOrDefault(sliceLevelBreakdownType, 0L) + breakdownValue); + } } + addMaxEndTimeAndMinStartTime(map, breakdown); + // Adding slice level latest leaf end time + long currentLeafEndTime = 0L; + for (QueryTimingType queryTimingType : QueryTimingType.values()) { + String timingType = queryTimingType.toString(); + String timingTypeStartTime = timingType + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; + currentLeafEndTime = Math.max(currentLeafEndTime, 
breakdown.get(timingTypeStartTime) + breakdown.get(timingType)); + } + String latestLeafEndTime = LATEST_LEAF_END_TIME + AT + collector; + map.put(latestLeafEndTime, Math.max(map.getOrDefault(latestLeafEndTime, 0L), currentLeafEndTime)); } + return buildQueryProfileBreakdownMap(map); + } + + /** + * This method is used to bring the max end time and min start time fields to the query profile breakdown. + * + * @param map the query level timing count breakdown for current instance + * @param breakdown the segment level timing count breakdown for current instance + */ + void addMaxEndTimeAndMinStartTime(Map map, Map breakdown) { + for (QueryTimingType queryTimingType : QueryTimingType.values()) { + String timingType = queryTimingType.toString(); + String timingTypeStartTime = timingType + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; + long currentMaxEndTime = map.getOrDefault(timingType + MAX_END_TIME_SUFFIX, 0L); + long currentMinStartTime = map.getOrDefault(timingType + MIN_START_TIME_SUFFIX, Long.MAX_VALUE); + // Only "create_weight" is performed at the query level + Map source = timingType.equals(QueryTimingType.CREATE_WEIGHT.toString()) ? map : breakdown; + long maxEndTime = Math.max(currentMaxEndTime, source.get(timingTypeStartTime) + source.get(timingType)); + long minStartTime = Math.min(currentMinStartTime, source.get(timingTypeStartTime)); + map.put(timingType + MAX_END_TIME_SUFFIX, maxEndTime); + map.put(timingType + MIN_START_TIME_SUFFIX, minStartTime); + } + } - return map; + /** + * This method is used to construct the final query profile breakdown map for return. 
+ * + * @param map a timing count breakdown map with the max end time and min start time information + */ + Map buildQueryProfileBreakdownMap(Map map) { + final Map breakdownMap = new HashMap<>(); + int collectorListSize = collectorList.size(); + // Calculating the total time stats for the query + if (collectorList != null && !collectorList.isEmpty()) { + for (String collector : collectorList) { + Long latestLeafEndTime = map.get(LATEST_LEAF_END_TIME + AT + collector); + if (latestLeafEndTime != null) { + Long currentSliceNodeTime = latestLeafEndTime - map.get( + QueryTimingType.CREATE_WEIGHT + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX + ); + maxSliceNodeTime = Math.max(maxSliceNodeTime, currentSliceNodeTime); + minSliceNodeTime = Math.min(minSliceNodeTime, currentSliceNodeTime); + nodeTimeSum += currentSliceNodeTime; + } + } + avgSliceNodeTime = nodeTimeSum / collectorListSize; + } + for (QueryTimingType timingType : QueryTimingType.values()) { + // Computing the total time for each breakdown including any wait time + String type = timingType.toString(); + String timingTypeStartTime = type + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; + Long timingTypeMaxEndTime = map.getOrDefault(type + MAX_END_TIME_SUFFIX, map.get(timingTypeStartTime) + map.get(type)); + Long timingTypeMinStartTime = map.getOrDefault(type + MIN_START_TIME_SUFFIX, map.get(timingTypeStartTime)); + breakdownMap.put(type, timingTypeMaxEndTime - timingTypeMinStartTime); + breakdownMap.put(type + TIMING_TYPE_COUNT_SUFFIX, map.get(type + TIMING_TYPE_COUNT_SUFFIX)); + // Computing the max end time and min start time for the query + breakdownMaxEndTime = Math.max(breakdownMaxEndTime, timingTypeMaxEndTime); + if (timingTypeMaxEndTime != 0L && timingTypeMinStartTime != 0L) { + breakdownMinStartTime = Math.min(breakdownMinStartTime, timingTypeMinStartTime); + } + // Computing the time/count stats for each breakdown across all slices + if (collectorList != null && 
!collectorList.isEmpty() && !type.contains(QueryTimingType.CREATE_WEIGHT.toString())) { + String maxSliceTypeTime = MAX_PREFIX + type; + String minSliceTypeTime = MIN_PREFIX + type; + String avgSliceTypeTime = AVG_PREFIX + type; + String maxSliceTypeCount = maxSliceTypeTime + TIMING_TYPE_COUNT_SUFFIX; + String minSliceTypeCount = minSliceTypeTime + TIMING_TYPE_COUNT_SUFFIX; + String avgSliceTypeCount = avgSliceTypeTime + TIMING_TYPE_COUNT_SUFFIX; + Long totalBreakdownTime = 0L; + Long totalBreakdownCount = 0L; + for (String collector : collectorList) { + Long breakdownTime = map.get(type + AT + collector); + Long breakdownCount = map.get(type + TIMING_TYPE_COUNT_SUFFIX + AT + collector); + totalBreakdownTime = constructStatResults( + breakdownMap, + maxSliceTypeTime, + minSliceTypeTime, + totalBreakdownTime, + breakdownTime + ); + totalBreakdownCount = constructStatResults( + breakdownMap, + maxSliceTypeCount, + minSliceTypeCount, + totalBreakdownCount, + breakdownCount + ); + } + breakdownMap.put(avgSliceTypeTime, totalBreakdownTime / collectorListSize); + breakdownMap.put(avgSliceTypeCount, totalBreakdownCount / collectorListSize); + } + } + return breakdownMap; + } + + private Long constructStatResults( + Map breakdownMap, + String maxSliceTypeName, + String minSliceTypeName, + Long totalBreakdown, + Long breakdown + ) { + if (breakdown == null) { + breakdownMap.put(maxSliceTypeName, null); + breakdownMap.put(minSliceTypeName, null); + } else { + breakdownMap.put(maxSliceTypeName, Math.max(breakdownMap.getOrDefault(maxSliceTypeName, Long.MIN_VALUE), breakdown)); + breakdownMap.put(minSliceTypeName, Math.min(breakdownMap.getOrDefault(minSliceTypeName, Long.MAX_VALUE), breakdown)); + totalBreakdown += breakdown; + } + return totalBreakdown; + } + + @Override + public long toNodeTime() { + return breakdownMaxEndTime - breakdownMinStartTime; + } + + @Override + public Long getMaxSliceNodeTime() { + return maxSliceNodeTime; + } + + @Override + public Long 
getMinSliceNodeTime() { + return minSliceNodeTime; + } + + @Override + public Long getAvgSliceNodeTime() { + return avgSliceNodeTime; + } + + @Override + public void associateLeafToCollector(LeafReaderContext leaf, Collector collector) { + if (collector != null) { + leafToCollector.put(leaf, collector.toString()); + } + } + + @Override + public void buildCollectorList(Collector collector) { + if (collector != null) { + collectorList.add(collector.toString()); + } } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java index 12f9a7184d84a..03618ecfc5505 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java @@ -34,6 +34,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.Collector; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; @@ -137,4 +138,11 @@ public boolean isCacheable(LeafReaderContext ctx) { return false; } + public void associateLeafToCollector(LeafReaderContext leaf, Collector collector) { + profile.associateLeafToCollector(leaf, collector); + } + + public void buildCollectorList(Collector collector) { + profile.buildCollectorList(collector); + } } diff --git a/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java b/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java new file mode 100644 index 0000000000000..db998acb3418d --- /dev/null +++ b/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java @@ -0,0 +1,158 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be 
licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.search.profile.query; + +import org.opensearch.test.OpenSearchTestCase; + +import java.util.HashMap; +import java.util.Map; + +public class ConcurrentQueryProfileBreakdownTests extends OpenSearchTestCase { + private static final String CREATE_WEIGHT = QueryTimingType.CREATE_WEIGHT.toString(); + private static final String BUILD_SCORER = QueryTimingType.BUILD_SCORER.toString(); + private static final String NEXT_DOC = QueryTimingType.NEXT_DOC.toString(); + private static final String ADVANCE = QueryTimingType.ADVANCE.toString(); + private static final String MATCH = QueryTimingType.MATCH.toString(); + private static final String SCORE = QueryTimingType.SCORE.toString(); + private static final String SHALLOW_ADVANCE = QueryTimingType.SHALLOW_ADVANCE.toString(); + private static final String COMPUTE_MAX_SCORE = QueryTimingType.COMPUTE_MAX_SCORE.toString(); + private static final String SET_MIN_COMPETITIVE_SCORE = QueryTimingType.SET_MIN_COMPETITIVE_SCORE.toString(); + private static final String COUNT_SUFFIX = "_count"; + private static final String START_TIME_SUFFIX = "_start_time"; + private static final String MAX_END_TIME_SUFFIX = "_max_end_time"; + private static final String MIN_START_TIME_SUFFIX = "_min_start_time"; + + public void testBuildQueryProfileBreakdownMap() { + Map testMap = new HashMap<>(); + testMap.put(CREATE_WEIGHT, 370343L); + testMap.put(CREATE_WEIGHT + COUNT_SUFFIX, 1L); + testMap.put(CREATE_WEIGHT + START_TIME_SUFFIX, 1598347679188617L); + testMap.put(CREATE_WEIGHT + MAX_END_TIME_SUFFIX, 1598347679558960L); + testMap.put(CREATE_WEIGHT + MIN_START_TIME_SUFFIX, 1598347679188617L); + testMap.put(BUILD_SCORER, 0L); + testMap.put(BUILD_SCORER + COUNT_SUFFIX, 6L); + testMap.put(BUILD_SCORER + START_TIME_SUFFIX, 0L); + testMap.put(BUILD_SCORER 
+ MAX_END_TIME_SUFFIX, 1598347688270123L); + testMap.put(BUILD_SCORER + MIN_START_TIME_SUFFIX, 1598347686448701L); + testMap.put(NEXT_DOC, 0L); + testMap.put(NEXT_DOC + COUNT_SUFFIX, 5L); + testMap.put(NEXT_DOC + START_TIME_SUFFIX, 0L); + testMap.put(NEXT_DOC + MAX_END_TIME_SUFFIX, 1598347688298671L); + testMap.put(NEXT_DOC + MIN_START_TIME_SUFFIX, 1598347688110288L); + testMap.put(ADVANCE, 0L); + testMap.put(ADVANCE + COUNT_SUFFIX, 3L); + testMap.put(ADVANCE + START_TIME_SUFFIX, 0L); + testMap.put(ADVANCE + MAX_END_TIME_SUFFIX, 1598347688280739L); + testMap.put(ADVANCE + MIN_START_TIME_SUFFIX, 1598347687991984L); + testMap.put(MATCH, 0L); + testMap.put(MATCH + COUNT_SUFFIX, 0L); + testMap.put(MATCH + START_TIME_SUFFIX, 0L); + testMap.put(MATCH + MAX_END_TIME_SUFFIX, 0L); + testMap.put(MATCH + MIN_START_TIME_SUFFIX, 0L); + testMap.put(SCORE, 0L); + testMap.put(SCORE + COUNT_SUFFIX, 5L); + testMap.put(SCORE + START_TIME_SUFFIX, 0L); + testMap.put(SCORE + MAX_END_TIME_SUFFIX, 1598347688282743L); + testMap.put(SCORE + MIN_START_TIME_SUFFIX, 1598347688018500L); + testMap.put(SHALLOW_ADVANCE, 0L); + testMap.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); + testMap.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); + testMap.put(SHALLOW_ADVANCE + MAX_END_TIME_SUFFIX, 0L); + testMap.put(SHALLOW_ADVANCE + MIN_START_TIME_SUFFIX, 0L); + testMap.put(COMPUTE_MAX_SCORE, 0L); + testMap.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); + testMap.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); + testMap.put(COMPUTE_MAX_SCORE + MAX_END_TIME_SUFFIX, 0L); + testMap.put(COMPUTE_MAX_SCORE + MIN_START_TIME_SUFFIX, 0L); + testMap.put(SET_MIN_COMPETITIVE_SCORE, 0L); + testMap.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); + testMap.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); + testMap.put(SET_MIN_COMPETITIVE_SCORE + MAX_END_TIME_SUFFIX, 0L); + testMap.put(SET_MIN_COMPETITIVE_SCORE + MIN_START_TIME_SUFFIX, 0L); + ConcurrentQueryProfileBreakdown profileBreakdown = new 
ConcurrentQueryProfileBreakdown(); + Map breakdownMap = profileBreakdown.buildQueryProfileBreakdownMap(testMap); + assertEquals(18, breakdownMap.size()); + assertEquals( + "{set_min_competitive_score_count=0, match_count=0, shallow_advance_count=0, set_min_competitive_score=0, next_doc=188383, match=0, next_doc_count=5, score_count=5, compute_max_score_count=0, compute_max_score=0, advance=288755, advance_count=3, score=264243, build_scorer_count=6, create_weight=370343, shallow_advance=0, create_weight_count=1, build_scorer=1821422}", + breakdownMap.toString() + ); + } + + public void testAddMaxEndTimeAndMinStartTime() { + Map map = new HashMap<>(); + map.put(CREATE_WEIGHT, 201692L); + map.put(CREATE_WEIGHT + COUNT_SUFFIX, 1L); + map.put(CREATE_WEIGHT + START_TIME_SUFFIX, 1629732014278990L); + map.put(BUILD_SCORER, 0L); + map.put(BUILD_SCORER + COUNT_SUFFIX, 2L); + map.put(BUILD_SCORER + START_TIME_SUFFIX, 0L); + map.put(NEXT_DOC, 0L); + map.put(NEXT_DOC + COUNT_SUFFIX, 2L); + map.put(NEXT_DOC + START_TIME_SUFFIX, 0L); + map.put(ADVANCE, 0L); + map.put(ADVANCE + COUNT_SUFFIX, 1L); + map.put(ADVANCE + START_TIME_SUFFIX, 0L); + map.put(MATCH, 0L); + map.put(MATCH + COUNT_SUFFIX, 0L); + map.put(MATCH + START_TIME_SUFFIX, 0L); + map.put(SCORE, 0L); + map.put(SCORE + COUNT_SUFFIX, 2L); + map.put(SCORE + START_TIME_SUFFIX, 0L); + map.put(SHALLOW_ADVANCE, 0L); + map.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); + map.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); + map.put(COMPUTE_MAX_SCORE, 0L); + map.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); + map.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); + map.put(SET_MIN_COMPETITIVE_SCORE, 0L); + map.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); + map.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); + + Map breakdown = new HashMap<>(); + breakdown.put(CREATE_WEIGHT, 0L); + breakdown.put(CREATE_WEIGHT + COUNT_SUFFIX, 0L); + breakdown.put(CREATE_WEIGHT + START_TIME_SUFFIX, 0L); + breakdown.put(BUILD_SCORER, 9649L); + 
breakdown.put(BUILD_SCORER + COUNT_SUFFIX, 2L); + breakdown.put(BUILD_SCORER + START_TIME_SUFFIX, 1629732030749745L); + breakdown.put(NEXT_DOC, 1150L); + breakdown.put(NEXT_DOC + COUNT_SUFFIX, 2L); + breakdown.put(NEXT_DOC + START_TIME_SUFFIX, 1629732030806446L); + breakdown.put(ADVANCE, 920L); + breakdown.put(ADVANCE + COUNT_SUFFIX, 1L); + breakdown.put(ADVANCE + START_TIME_SUFFIX, 1629732030776129L); + breakdown.put(MATCH, 0L); + breakdown.put(MATCH + COUNT_SUFFIX, 0L); + breakdown.put(MATCH + START_TIME_SUFFIX, 0L); + breakdown.put(SCORE, 1050L); + breakdown.put(SCORE + COUNT_SUFFIX, 2L); + breakdown.put(SCORE + START_TIME_SUFFIX, 1629732030778977L); + breakdown.put(SHALLOW_ADVANCE, 0L); + breakdown.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); + breakdown.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); + breakdown.put(COMPUTE_MAX_SCORE, 0L); + breakdown.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); + breakdown.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); + breakdown.put(SET_MIN_COMPETITIVE_SCORE, 0L); + breakdown.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); + breakdown.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); + ConcurrentQueryProfileBreakdown profileBreakdown = new ConcurrentQueryProfileBreakdown(); + profileBreakdown.addMaxEndTimeAndMinStartTime(map, breakdown); + assertEquals(45, map.size()); + assertEquals( + "{set_min_competitive_score_count=0, match_count=0, score_start_time=0, shallow_advance_count=0, create_weight_start_time=1629732014278990, next_doc=0, compute_max_score_start_time=0, shallow_advance_min_start_time=0, score_count=2, compute_max_score_count=0, advance_start_time=0, advance=0, advance_count=1, compute_max_score_min_start_time=0, score=0, next_doc_max_end_time=1629732030807596, advance_max_end_time=1629732030777049, next_doc_start_time=0, shallow_advance=0, build_scorer_max_end_time=1629732030759394, create_weight_count=1, create_weight_max_end_time=1629732014480682, match_min_start_time=0, build_scorer=0, 
compute_max_score_max_end_time=0, next_doc_min_start_time=1629732030806446, set_min_competitive_score=0, set_min_competitive_score_start_time=0, match=0, set_min_competitive_score_max_end_time=0, match_start_time=0, shallow_advance_max_end_time=0, build_scorer_start_time=0, next_doc_count=2, shallow_advance_start_time=0, set_min_competitive_score_min_start_time=0, compute_max_score=0, create_weight_min_start_time=1629732014278990, build_scorer_count=2, create_weight=201692, score_min_start_time=1629732030778977, match_max_end_time=0, advance_min_start_time=1629732030776129, score_max_end_time=1629732030780027, build_scorer_min_start_time=1629732030749745}", + map.toString() + ); + } +} From a6cee8044276d11e1c1befdab98e503e76afe710 Mon Sep 17 00:00:00 2001 From: Ticheng Lin Date: Mon, 28 Aug 2023 15:46:21 -0700 Subject: [PATCH 2/9] Refactor and work on the PR comments Signed-off-by: Ticheng Lin --- .../search/internal/ContextIndexSearcher.java | 3 +- .../profile/AbstractInternalProfileTree.java | 9 +- .../profile/AbstractProfileBreakdown.java | 14 +- .../profile/ContextualProfileBreakdown.java | 5 +- .../AggregationProfileBreakdown.java | 17 +- .../query/AbstractQueryProfileTree.java | 70 +++++ .../ConcurrentQueryProfileBreakdown.java | 274 +++++++++--------- .../query/ConcurrentQueryProfileTree.java | 49 ++++ .../query/InternalQueryProfileTree.java | 61 +--- .../search/profile/query/ProfileWeight.java | 8 +- .../search/profile/query/QueryProfiler.java | 8 +- .../ConcurrentQueryProfileBreakdownTests.java | 77 +---- .../profile/query/QueryProfilerTests.java | 139 +++++++-- .../search/query/QueryProfilePhaseTests.java | 96 ++++++ 14 files changed, 492 insertions(+), 338 deletions(-) create mode 100644 server/src/main/java/org/opensearch/search/profile/query/AbstractQueryProfileTree.java create mode 100644 server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java diff --git 
a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java index acd61f1a94679..aa86ed4e56801 100644 --- a/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/opensearch/search/internal/ContextIndexSearcher.java @@ -300,8 +300,7 @@ private void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collecto try { cancellable.checkCancelled(); if (weight instanceof ProfileWeight) { - ((ProfileWeight) weight).associateLeafToCollector(ctx, collector); - ((ProfileWeight) weight).buildCollectorList(collector); + ((ProfileWeight) weight).associateCollectorToLeaves(ctx, collector); } weight = wrapWeight(weight); // See please https://github.com/apache/lucene/pull/964 diff --git a/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java b/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java index ea93a80a4d552..d1f525477f25f 100644 --- a/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java +++ b/server/src/main/java/org/opensearch/search/profile/AbstractInternalProfileTree.java @@ -180,16 +180,17 @@ private ProfileResult doGetTree(int token) { // calculating the same times over and over...but worth the effort? 
String type = getTypeFromElement(element); String description = getDescriptionFromElement(element); + return createProfileResult(type, description, breakdown, childrenProfileResults); + } + + protected ProfileResult createProfileResult(String type, String description, PB breakdown, List childrenProfileResults) { return new ProfileResult( type, description, breakdown.toBreakdownMap(), breakdown.toDebugMap(), breakdown.toNodeTime(), - childrenProfileResults, - breakdown.getMaxSliceNodeTime(), - breakdown.getMinSliceNodeTime(), - breakdown.getAvgSliceNodeTime() + childrenProfileResults ); } diff --git a/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java index e7966aa90425c..4a1563e7cdce9 100644 --- a/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/AbstractProfileBreakdown.java @@ -88,7 +88,7 @@ public Map toBreakdownMap() { /** * Fetch extra debugging information. 
*/ - protected Map toDebugMap() { + public Map toDebugMap() { return emptyMap(); } @@ -99,16 +99,4 @@ public long toNodeTime() { } return total; } - - public Long getMaxSliceNodeTime() { - return null; - } - - public Long getMinSliceNodeTime() { - return null; - } - - public Long getAvgSliceNodeTime() { - return null; - } } diff --git a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java index d3e5c8089aeea..143c7df7c1233 100644 --- a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java @@ -18,7 +18,6 @@ * @opensearch.internal */ public abstract class ContextualProfileBreakdown> extends AbstractProfileBreakdown { - public ContextualProfileBreakdown(Class clazz) { super(clazz); } @@ -30,7 +29,5 @@ public ContextualProfileBreakdown(Class clazz) { */ public abstract AbstractProfileBreakdown context(Object context); - public void associateLeafToCollector(LeafReaderContext leaf, Collector collector) {} - - public void buildCollectorList(Collector collector) {} + public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collector) {} } diff --git a/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java index d0c67915e6d8d..9fa628f107f7b 100644 --- a/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/aggregation/AggregationProfileBreakdown.java @@ -34,7 +34,6 @@ import org.opensearch.search.profile.AbstractProfileBreakdown; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -60,21 +59,7 @@ public void addDebugInfo(String key, Object value) { } @Override - 
protected Map toDebugMap() { + public Map toDebugMap() { return unmodifiableMap(extra); } - - /** - * Build a timing count startTime breakdown for aggregation timing types - */ - @Override - public Map toBreakdownMap() { - Map map = new HashMap<>(timings.length * 3); - for (AggregationTimingType timingType : timingTypes) { - map.put(timingType.toString(), timings[timingType.ordinal()].getApproximateTiming()); - map.put(timingType + TIMING_TYPE_COUNT_SUFFIX, timings[timingType.ordinal()].getCount()); - map.put(timingType + TIMING_TYPE_START_TIME_SUFFIX, timings[timingType.ordinal()].getEarliestTimerStartTime()); - } - return Collections.unmodifiableMap(map); - } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/AbstractQueryProfileTree.java b/server/src/main/java/org/opensearch/search/profile/query/AbstractQueryProfileTree.java new file mode 100644 index 0000000000000..8e825def13f5d --- /dev/null +++ b/server/src/main/java/org/opensearch/search/profile/query/AbstractQueryProfileTree.java @@ -0,0 +1,70 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.profile.query; + +import org.apache.lucene.search.Query; +import org.opensearch.search.profile.AbstractInternalProfileTree; +import org.opensearch.search.profile.ContextualProfileBreakdown; +import org.opensearch.search.profile.ProfileResult; + +/** + * This class tracks the dependency tree for queries (scoring and rewriting) and + * generates {@link QueryProfileBreakdown} for each node in the tree. 
It also finalizes the tree + * and returns a list of {@link ProfileResult} that can be serialized back to the client + * + * @opensearch.internal + */ +public abstract class AbstractQueryProfileTree extends AbstractInternalProfileTree, Query> { + + /** Rewrite time */ + private long rewriteTime; + private long rewriteScratch; + + @Override + protected String getTypeFromElement(Query query) { + // Anonymous classes won't have a name, + // we need to get the super class + if (query.getClass().getSimpleName().isEmpty()) { + return query.getClass().getSuperclass().getSimpleName(); + } + return query.getClass().getSimpleName(); + } + + @Override + protected String getDescriptionFromElement(Query query) { + return query.toString(); + } + + /** + * Begin timing a query for a specific Timing context + */ + public void startRewriteTime() { + assert rewriteScratch == 0; + rewriteScratch = System.nanoTime(); + } + + /** + * Halt the timing process and add the elapsed rewriting time. + * startRewriteTime() must be called for a particular context prior to calling + * stopAndAddRewriteTime(), otherwise the elapsed time will be negative and + * nonsensical + * + * @return The elapsed time + */ + public long stopAndAddRewriteTime() { + long time = Math.max(1, System.nanoTime() - rewriteScratch); + rewriteTime += time; + rewriteScratch = 0; + return time; + } + + public long getRewriteTime() { + return rewriteTime; + } +} diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index 2d4059d104bcb..f35f87bb320d5 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -29,20 +29,17 @@ public final class ConcurrentQueryProfileBreakdown extends ContextualProfileBreakdown { public static 
final String MAX_END_TIME_SUFFIX = "_max_end_time"; public static final String MIN_START_TIME_SUFFIX = "_min_start_time"; - private static final String AT = "@"; - private static final String LATEST_LEAF_END_TIME = "latest_leaf_end_time"; private static final String MAX_PREFIX = "max_"; private static final String MIN_PREFIX = "min_"; private static final String AVG_PREFIX = "avg_"; - private static long breakdownMaxEndTime = 0; - private static long breakdownMinStartTime = Long.MAX_VALUE; - private static long maxSliceNodeTime = Long.MIN_VALUE; - private static long minSliceNodeTime = Long.MAX_VALUE; - private static long avgSliceNodeTime = 0L; - private static long nodeTimeSum = 0L; + private long breakdownMaxEndTime = Long.MIN_VALUE; + private long breakdownMinStartTime = Long.MAX_VALUE; + private long maxSliceNodeTime = Long.MIN_VALUE; + private long minSliceNodeTime = Long.MAX_VALUE; + private long avgSliceNodeTime = 0L; + private long nodeTimeSum = 0L; private final Map> contexts = new ConcurrentHashMap<>(); - private List collectorList = new ArrayList<>(); - private Map leafToCollector = new HashMap<>(); + private Map> collectorToLeaves = new HashMap<>(); /** Sole constructor. 
*/ public ConcurrentQueryProfileBreakdown() { @@ -63,146 +60,146 @@ public AbstractProfileBreakdown context(Object context) { @Override public Map toBreakdownMap() { - final Map map = new HashMap<>(super.toBreakdownMap()); - - for (final Map.Entry> context : contexts.entrySet()) { - Map breakdown = context.getValue().toBreakdownMap(); - String collector = leafToCollector.get(context.getKey()); - for (final Map.Entry entry : breakdown.entrySet()) { - String breakdownType = entry.getKey(); - Long breakdownValue = entry.getValue(); - // Adding breakdown type count - if (breakdownType.contains(TIMING_TYPE_COUNT_SUFFIX)) { - map.merge(breakdownType, breakdownValue, Long::sum); - } - // Adding slice level breakdown - if (!breakdownType.contains(TIMING_TYPE_START_TIME_SUFFIX)) { - String sliceLevelBreakdownType = breakdownType + AT + collector; - map.put(sliceLevelBreakdownType, map.getOrDefault(sliceLevelBreakdownType, 0L) + breakdownValue); + Map> sliceLevelBreakdown = new HashMap<>(); + if (collectorToLeaves == null || collectorToLeaves.isEmpty() || contexts.isEmpty()) { + return new HashMap<>(super.toBreakdownMap()); + } + for (Map.Entry> slice : collectorToLeaves.entrySet()) { + String sliceCollector = slice.getKey(); + List leaves = slice.getValue(); + long lastLeafEndTime = 0L; + for (LeafReaderContext leaf : leaves) { + long currentLeafEndTime = 0L; + Map currentLeafBreakdownMap = contexts.get(leaf).toBreakdownMap(); + for (Map.Entry breakDownEntry : currentLeafBreakdownMap.entrySet()) { + Map currentSliceBreakdown = sliceLevelBreakdown.getOrDefault( + sliceCollector, + new HashMap<>(super.toBreakdownMap()) + ); + String breakdownType = breakDownEntry.getKey(); + Long breakdownValue = breakDownEntry.getValue(); + // Adding breakdown type count + if (breakdownType.contains(TIMING_TYPE_COUNT_SUFFIX)) { + currentSliceBreakdown.merge(breakdownType, breakdownValue, Long::sum); + } + if (breakdownType.contains(TIMING_TYPE_START_TIME_SUFFIX)) { + // Finding the earliest 
start time and the last end time for each breakdown type within the current slice to + // compute the total breakdown time + String method = breakdownType.split(TIMING_TYPE_START_TIME_SUFFIX)[0]; + String maxEndTimeKey = method + MAX_END_TIME_SUFFIX; + String minStartTimeKey = method + MIN_START_TIME_SUFFIX; + long maxEndTime = Math.max( + currentSliceBreakdown.getOrDefault(maxEndTimeKey, Long.MIN_VALUE), + breakdownValue + currentLeafBreakdownMap.get(method) + ); + long minStartTime = Math.min(currentSliceBreakdown.getOrDefault(minStartTimeKey, Long.MAX_VALUE), breakdownValue); + currentSliceBreakdown.put(maxEndTimeKey, maxEndTime); + currentSliceBreakdown.put(minStartTimeKey, minStartTime); + // Finding the current leaf end time to compute the last leaf end time + currentLeafEndTime = Math.max(currentLeafEndTime, maxEndTime); + } + sliceLevelBreakdown.put(sliceCollector, currentSliceBreakdown); } + lastLeafEndTime = Math.max(lastLeafEndTime, currentLeafEndTime); } - addMaxEndTimeAndMinStartTime(map, breakdown); - // Adding slice level latest leaf end time - long currentLeafEndTime = 0L; + // Computing breakdown type total time for (QueryTimingType queryTimingType : QueryTimingType.values()) { String timingType = queryTimingType.toString(); - String timingTypeStartTime = timingType + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; - currentLeafEndTime = Math.max(currentLeafEndTime, breakdown.get(timingTypeStartTime) + breakdown.get(timingType)); + if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { + long currentSliceMaxEndTime = sliceLevelBreakdown.get(sliceCollector).get(timingType + MAX_END_TIME_SUFFIX); + long currentSliceMinStartTime = sliceLevelBreakdown.get(sliceCollector).get(timingType + MIN_START_TIME_SUFFIX); + sliceLevelBreakdown.get(sliceCollector).put(timingType, currentSliceMaxEndTime - currentSliceMinStartTime); + } } - String latestLeafEndTime = LATEST_LEAF_END_TIME + AT + collector; - map.put(latestLeafEndTime, 
Math.max(map.getOrDefault(latestLeafEndTime, 0L), currentLeafEndTime)); - } - return buildQueryProfileBreakdownMap(map); - } - - /** - * This method is used to bring the max end time and min start time fields to the query profile breakdown. - * - * @param map the query level timing count breakdown for current instance - * @param breakdown the segment level timing count breakdown for current instance - */ - void addMaxEndTimeAndMinStartTime(Map map, Map breakdown) { - for (QueryTimingType queryTimingType : QueryTimingType.values()) { - String timingType = queryTimingType.toString(); - String timingTypeStartTime = timingType + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; - long currentMaxEndTime = map.getOrDefault(timingType + MAX_END_TIME_SUFFIX, 0L); - long currentMinStartTime = map.getOrDefault(timingType + MIN_START_TIME_SUFFIX, Long.MAX_VALUE); - // Only "create_weight" is performed at the query level - Map source = timingType.equals(QueryTimingType.CREATE_WEIGHT.toString()) ? 
map : breakdown; - long maxEndTime = Math.max(currentMaxEndTime, source.get(timingTypeStartTime) + source.get(timingType)); - long minStartTime = Math.min(currentMinStartTime, source.get(timingTypeStartTime)); - map.put(timingType + MAX_END_TIME_SUFFIX, maxEndTime); - map.put(timingType + MIN_START_TIME_SUFFIX, minStartTime); + // Computing slice level node time and stats + long createWeightStartTime = sliceLevelBreakdown.get(sliceCollector) + .get(QueryTimingType.CREATE_WEIGHT + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX); + breakdownMaxEndTime = Math.max(breakdownMaxEndTime, lastLeafEndTime); + breakdownMinStartTime = createWeightStartTime; + long currentSliceNodeTime = lastLeafEndTime - createWeightStartTime; + maxSliceNodeTime = Math.max(maxSliceNodeTime, currentSliceNodeTime); + minSliceNodeTime = Math.min(minSliceNodeTime, currentSliceNodeTime); + nodeTimeSum += currentSliceNodeTime; } + // Computing avg slice node time + avgSliceNodeTime = nodeTimeSum / collectorToLeaves.size(); + return buildFinalBreakdownMap(sliceLevelBreakdown); } /** * This method is used to construct the final query profile breakdown map for return. 
* - * @param map a timing count breakdown map with the max end time and min start time information + * @param sliceLevelBreakdown a timing count breakdown map with the max end time and min start time information */ - Map buildQueryProfileBreakdownMap(Map map) { + Map buildFinalBreakdownMap(Map> sliceLevelBreakdown) { final Map breakdownMap = new HashMap<>(); - int collectorListSize = collectorList.size(); - // Calculating the total time stats for the query - if (collectorList != null && !collectorList.isEmpty()) { - for (String collector : collectorList) { - Long latestLeafEndTime = map.get(LATEST_LEAF_END_TIME + AT + collector); - if (latestLeafEndTime != null) { - Long currentSliceNodeTime = latestLeafEndTime - map.get( - QueryTimingType.CREATE_WEIGHT + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX + final Map totalTimeMap = new HashMap<>(); + int sliceCount = sliceLevelBreakdown.size(); + for (Map.Entry> slice : sliceLevelBreakdown.entrySet()) { + for (QueryTimingType queryTimingType : QueryTimingType.values()) { + String timingType = queryTimingType.toString(); + // Computing the time/count stats for each breakdown across all slices + if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { + String maxBreakdownTypeTime = MAX_PREFIX + timingType; + String minBreakdownTypeTime = MIN_PREFIX + timingType; + String avgBreakdownTypeTime = AVG_PREFIX + timingType; + String maxBreakdownTypeCount = maxBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; + String minBreakdownTypeCount = minBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; + String avgBreakdownTypeCount = avgBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; + Long breakdownTime = slice.getValue().get(timingType); + Long breakdownCount = slice.getValue().get(timingType + TIMING_TYPE_COUNT_SUFFIX); + breakdownMap.put( + maxBreakdownTypeTime, + Math.max(breakdownMap.getOrDefault(maxBreakdownTypeTime, Long.MIN_VALUE), breakdownTime) ); - maxSliceNodeTime = Math.max(maxSliceNodeTime, currentSliceNodeTime); - 
minSliceNodeTime = Math.min(minSliceNodeTime, currentSliceNodeTime); - nodeTimeSum += currentSliceNodeTime; - } - } - avgSliceNodeTime = nodeTimeSum / collectorListSize; - } - for (QueryTimingType timingType : QueryTimingType.values()) { - // Computing the total time for each breakdown including any wait time - String type = timingType.toString(); - String timingTypeStartTime = type + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; - Long timingTypeMaxEndTime = map.getOrDefault(type + MAX_END_TIME_SUFFIX, map.get(timingTypeStartTime) + map.get(type)); - Long timingTypeMinStartTime = map.getOrDefault(type + MIN_START_TIME_SUFFIX, map.get(timingTypeStartTime)); - breakdownMap.put(type, timingTypeMaxEndTime - timingTypeMinStartTime); - breakdownMap.put(type + TIMING_TYPE_COUNT_SUFFIX, map.get(type + TIMING_TYPE_COUNT_SUFFIX)); - // Computing the max end time and min start time for the query - breakdownMaxEndTime = Math.max(breakdownMaxEndTime, timingTypeMaxEndTime); - if (timingTypeMaxEndTime != 0L && timingTypeMinStartTime != 0L) { - breakdownMinStartTime = Math.min(breakdownMinStartTime, timingTypeMinStartTime); - } - // Computing the time/count stats for each breakdown across all slices - if (collectorList != null && !collectorList.isEmpty() && !type.contains(QueryTimingType.CREATE_WEIGHT.toString())) { - String maxSliceTypeTime = MAX_PREFIX + type; - String minSliceTypeTime = MIN_PREFIX + type; - String avgSliceTypeTime = AVG_PREFIX + type; - String maxSliceTypeCount = maxSliceTypeTime + TIMING_TYPE_COUNT_SUFFIX; - String minSliceTypeCount = minSliceTypeTime + TIMING_TYPE_COUNT_SUFFIX; - String avgSliceTypeCount = avgSliceTypeTime + TIMING_TYPE_COUNT_SUFFIX; - Long totalBreakdownTime = 0L; - Long totalBreakdownCount = 0L; - for (String collector : collectorList) { - Long breakdownTime = map.get(type + AT + collector); - Long breakdownCount = map.get(type + TIMING_TYPE_COUNT_SUFFIX + AT + collector); - totalBreakdownTime = constructStatResults( - 
breakdownMap, - maxSliceTypeTime, - minSliceTypeTime, - totalBreakdownTime, - breakdownTime + breakdownMap.put( + minBreakdownTypeTime, + Math.min(breakdownMap.getOrDefault(minBreakdownTypeTime, Long.MAX_VALUE), breakdownTime) + ); + breakdownMap.put(avgBreakdownTypeTime, breakdownMap.getOrDefault(avgBreakdownTypeTime, 0L) + breakdownTime); + breakdownMap.put( + maxBreakdownTypeCount, + Math.max(breakdownMap.getOrDefault(maxBreakdownTypeCount, Long.MIN_VALUE), breakdownCount) ); - totalBreakdownCount = constructStatResults( - breakdownMap, - maxSliceTypeCount, - minSliceTypeCount, - totalBreakdownCount, - breakdownCount + breakdownMap.put( + minBreakdownTypeCount, + Math.min(breakdownMap.getOrDefault(minBreakdownTypeCount, Long.MAX_VALUE), breakdownCount) ); + breakdownMap.put(avgBreakdownTypeCount, breakdownMap.getOrDefault(avgBreakdownTypeCount, 0L) + breakdownCount); + } + // Finding the max slice end time and min slice start time to compute total time + String maxEndTime = timingType + MAX_END_TIME_SUFFIX; + String minStartTime = timingType + MIN_START_TIME_SUFFIX; + Long maxEndTimeValue = slice.getValue().get(timingType + MAX_END_TIME_SUFFIX); + Long minStartTimeValue = slice.getValue().get(timingType + MIN_START_TIME_SUFFIX); + totalTimeMap.put(maxEndTime, Math.max(totalTimeMap.getOrDefault(maxEndTime, Long.MIN_VALUE), maxEndTimeValue)); + totalTimeMap.put(minStartTime, Math.min(totalTimeMap.getOrDefault(minStartTime, Long.MAX_VALUE), minStartTimeValue)); + // Computing the total count for each breakdown across all slices + String breakdownCount = timingType + TIMING_TYPE_COUNT_SUFFIX; + Long breakdownCountValue = slice.getValue().get(breakdownCount); + if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { + breakdownMap.put(breakdownCount, breakdownMap.getOrDefault(breakdownCount, 0L) + breakdownCountValue); + } else { + breakdownMap.put(breakdownCount, 1L); + breakdownMap.put(timingType, slice.getValue().get(timingType)); } - 
breakdownMap.put(avgSliceTypeTime, totalBreakdownTime / collectorListSize); - breakdownMap.put(avgSliceTypeCount, totalBreakdownCount / collectorListSize); } } - return breakdownMap; - } - - private Long constructStatResults( - Map breakdownMap, - String maxSliceTypeName, - String minSliceTypeName, - Long totalBreakdown, - Long breakdown - ) { - if (breakdown == null) { - breakdownMap.put(maxSliceTypeName, null); - breakdownMap.put(minSliceTypeName, null); - } else { - breakdownMap.put(maxSliceTypeName, Math.max(breakdownMap.getOrDefault(maxSliceTypeName, Long.MIN_VALUE), breakdown)); - breakdownMap.put(minSliceTypeName, Math.min(breakdownMap.getOrDefault(minSliceTypeName, Long.MAX_VALUE), breakdown)); - totalBreakdown += breakdown; + for (QueryTimingType queryTimingType : QueryTimingType.values()) { + String timingType = queryTimingType.toString(); + if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { + // Computing avg time/count stats + String avgBreakdownTypeTime = AVG_PREFIX + timingType; + String avgBreakdownTypeCount = avgBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; + breakdownMap.put(avgBreakdownTypeTime, breakdownMap.get(avgBreakdownTypeTime) / sliceCount); + breakdownMap.put(avgBreakdownTypeCount, breakdownMap.get(avgBreakdownTypeCount) / sliceCount); + // Computing total time for each breakdown including any wait time + Long maxSliceEndTime = totalTimeMap.get(timingType + MAX_END_TIME_SUFFIX); + Long minSliceStartTime = totalTimeMap.get(timingType + MIN_START_TIME_SUFFIX); + breakdownMap.put(timingType, maxSliceEndTime - minSliceStartTime); + } } - return totalBreakdown; + return breakdownMap; } @Override @@ -210,32 +207,25 @@ public long toNodeTime() { return breakdownMaxEndTime - breakdownMinStartTime; } - @Override public Long getMaxSliceNodeTime() { return maxSliceNodeTime; } - @Override public Long getMinSliceNodeTime() { return minSliceNodeTime; } - @Override public Long getAvgSliceNodeTime() { return avgSliceNodeTime; } @Override - public 
void associateLeafToCollector(LeafReaderContext leaf, Collector collector) { - if (collector != null) { - leafToCollector.put(leaf, collector.toString()); - } - } - - @Override - public void buildCollectorList(Collector collector) { + public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collector) { if (collector != null) { - collectorList.add(collector.toString()); + String collectorManager = collector.toString(); + List leaves = collectorToLeaves.getOrDefault(collectorManager, new ArrayList<>()); + leaves.add(leaf); + collectorToLeaves.put(collectorManager, leaves); } } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java new file mode 100644 index 0000000000000..b9d4a5b3c9b45 --- /dev/null +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java @@ -0,0 +1,49 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.search.profile.query; + +import org.opensearch.search.profile.ContextualProfileBreakdown; +import org.opensearch.search.profile.ProfileResult; + +import java.util.List; + +/** + * This class returns a list of {@link ProfileResult} that can be serialized back to the client in the concurrent execution. 
+ * + * @opensearch.internal + */ +public class ConcurrentQueryProfileTree extends AbstractQueryProfileTree { + + @Override + protected ContextualProfileBreakdown createProfileBreakdown() { + return new ConcurrentQueryProfileBreakdown(); + } + + @Override + protected ProfileResult createProfileResult( + String type, + String description, + ContextualProfileBreakdown breakdown, + List childrenProfileResults + ) { + assert breakdown instanceof ConcurrentQueryProfileBreakdown; + final ConcurrentQueryProfileBreakdown concurrentBreakdown = (ConcurrentQueryProfileBreakdown) breakdown; + return new ProfileResult( + type, + description, + concurrentBreakdown.toBreakdownMap(), + concurrentBreakdown.toDebugMap(), + concurrentBreakdown.toNodeTime(), + childrenProfileResults, + concurrentBreakdown.getMaxSliceNodeTime(), + concurrentBreakdown.getMinSliceNodeTime(), + concurrentBreakdown.getAvgSliceNodeTime() + ); + } +} diff --git a/server/src/main/java/org/opensearch/search/profile/query/InternalQueryProfileTree.java b/server/src/main/java/org/opensearch/search/profile/query/InternalQueryProfileTree.java index 40c6a29384491..1ed367f094fb7 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/InternalQueryProfileTree.java +++ b/server/src/main/java/org/opensearch/search/profile/query/InternalQueryProfileTree.java @@ -32,73 +32,18 @@ package org.opensearch.search.profile.query; -import org.apache.lucene.search.Query; -import org.opensearch.search.profile.AbstractInternalProfileTree; import org.opensearch.search.profile.ContextualProfileBreakdown; import org.opensearch.search.profile.ProfileResult; /** - * This class tracks the dependency tree for queries (scoring and rewriting) and - * generates {@link QueryProfileBreakdown} for each node in the tree. 
It also finalizes the tree - * and returns a list of {@link ProfileResult} that can be serialized back to the client + * This class returns a list of {@link ProfileResult} that can be serialized back to the client in the non-concurrent execution. * * @opensearch.internal */ -final class InternalQueryProfileTree extends AbstractInternalProfileTree, Query> { - - /** Rewrite time */ - private long rewriteTime; - private long rewriteScratch; - private final boolean concurrent; - - InternalQueryProfileTree(boolean concurrent) { - this.concurrent = concurrent; - } +public class InternalQueryProfileTree extends AbstractQueryProfileTree { @Override protected ContextualProfileBreakdown createProfileBreakdown() { - return (concurrent) ? new ConcurrentQueryProfileBreakdown() : new QueryProfileBreakdown(); - } - - @Override - protected String getTypeFromElement(Query query) { - // Anonymous classes won't have a name, - // we need to get the super class - if (query.getClass().getSimpleName().isEmpty()) { - return query.getClass().getSuperclass().getSimpleName(); - } - return query.getClass().getSimpleName(); - } - - @Override - protected String getDescriptionFromElement(Query query) { - return query.toString(); - } - - /** - * Begin timing a query for a specific Timing context - */ - public void startRewriteTime() { - assert rewriteScratch == 0; - rewriteScratch = System.nanoTime(); - } - - /** - * Halt the timing process and add the elapsed rewriting time. 
- * startRewriteTime() must be called for a particular context prior to calling - * stopAndAddRewriteTime(), otherwise the elapsed time will be negative and - * nonsensical - * - * @return The elapsed time - */ - public long stopAndAddRewriteTime() { - long time = Math.max(1, System.nanoTime() - rewriteScratch); - rewriteTime += time; - rewriteScratch = 0; - return time; - } - - public long getRewriteTime() { - return rewriteTime; + return new QueryProfileBreakdown(); } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java index 03618ecfc5505..0cdacdbeb46d9 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java @@ -138,11 +138,7 @@ public boolean isCacheable(LeafReaderContext ctx) { return false; } - public void associateLeafToCollector(LeafReaderContext leaf, Collector collector) { - profile.associateLeafToCollector(leaf, collector); - } - - public void buildCollectorList(Collector collector) { - profile.buildCollectorList(collector); + public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collector) { + profile.associateCollectorToLeaves(leaf, collector); } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/QueryProfiler.java b/server/src/main/java/org/opensearch/search/profile/query/QueryProfiler.java index 9527e010005c3..b1024c2312c50 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/QueryProfiler.java +++ b/server/src/main/java/org/opensearch/search/profile/query/QueryProfiler.java @@ -59,7 +59,7 @@ public final class QueryProfiler extends AbstractProfiler testMap = new HashMap<>(); testMap.put(CREATE_WEIGHT, 370343L); testMap.put(CREATE_WEIGHT + COUNT_SUFFIX, 1L); @@ -81,78 +81,13 @@ public void testBuildQueryProfileBreakdownMap() { testMap.put(SET_MIN_COMPETITIVE_SCORE + 
MAX_END_TIME_SUFFIX, 0L); testMap.put(SET_MIN_COMPETITIVE_SCORE + MIN_START_TIME_SUFFIX, 0L); ConcurrentQueryProfileBreakdown profileBreakdown = new ConcurrentQueryProfileBreakdown(); - Map breakdownMap = profileBreakdown.buildQueryProfileBreakdownMap(testMap); - assertEquals(18, breakdownMap.size()); + Map> sliceLevelBreakdown = new HashMap<>(); + sliceLevelBreakdown.put("testCollector", testMap); + Map breakdownMap = profileBreakdown.buildFinalBreakdownMap(sliceLevelBreakdown); + assertEquals(66, breakdownMap.size()); assertEquals( - "{set_min_competitive_score_count=0, match_count=0, shallow_advance_count=0, set_min_competitive_score=0, next_doc=188383, match=0, next_doc_count=5, score_count=5, compute_max_score_count=0, compute_max_score=0, advance=288755, advance_count=3, score=264243, build_scorer_count=6, create_weight=370343, shallow_advance=0, create_weight_count=1, build_scorer=1821422}", + "{max_match=0, set_min_competitive_score_count=0, match_count=0, avg_score_count=5, shallow_advance_count=0, next_doc=188383, min_build_scorer=0, score_count=5, compute_max_score_count=0, advance=288755, min_advance=0, min_set_min_competitive_score=0, score=264243, avg_set_min_competitive_score_count=0, min_match_count=0, avg_score=0, max_next_doc_count=5, avg_shallow_advance=0, max_compute_max_score_count=0, max_shallow_advance_count=0, set_min_competitive_score=0, min_build_scorer_count=6, next_doc_count=5, avg_next_doc=0, min_match=0, compute_max_score=0, max_build_scorer=0, min_set_min_competitive_score_count=0, avg_match_count=0, avg_advance=0, build_scorer_count=6, avg_build_scorer_count=6, min_next_doc_count=5, avg_match=0, max_score_count=5, min_shallow_advance_count=0, avg_compute_max_score=0, max_advance=0, avg_shallow_advance_count=0, avg_set_min_competitive_score=0, avg_compute_max_score_count=0, avg_build_scorer=0, max_set_min_competitive_score_count=0, advance_count=3, max_build_scorer_count=6, shallow_advance=0, max_match_count=0, 
min_compute_max_score=0, create_weight_count=1, build_scorer=1821422, max_compute_max_score=0, max_set_min_competitive_score=0, min_shallow_advance=0, match=0, min_next_doc=0, avg_advance_count=3, max_shallow_advance=0, max_advance_count=3, min_score=0, max_next_doc=0, create_weight=370343, avg_next_doc_count=5, max_score=0, min_compute_max_score_count=0, min_score_count=5, min_advance_count=3}", breakdownMap.toString() ); } - - public void testAddMaxEndTimeAndMinStartTime() { - Map map = new HashMap<>(); - map.put(CREATE_WEIGHT, 201692L); - map.put(CREATE_WEIGHT + COUNT_SUFFIX, 1L); - map.put(CREATE_WEIGHT + START_TIME_SUFFIX, 1629732014278990L); - map.put(BUILD_SCORER, 0L); - map.put(BUILD_SCORER + COUNT_SUFFIX, 2L); - map.put(BUILD_SCORER + START_TIME_SUFFIX, 0L); - map.put(NEXT_DOC, 0L); - map.put(NEXT_DOC + COUNT_SUFFIX, 2L); - map.put(NEXT_DOC + START_TIME_SUFFIX, 0L); - map.put(ADVANCE, 0L); - map.put(ADVANCE + COUNT_SUFFIX, 1L); - map.put(ADVANCE + START_TIME_SUFFIX, 0L); - map.put(MATCH, 0L); - map.put(MATCH + COUNT_SUFFIX, 0L); - map.put(MATCH + START_TIME_SUFFIX, 0L); - map.put(SCORE, 0L); - map.put(SCORE + COUNT_SUFFIX, 2L); - map.put(SCORE + START_TIME_SUFFIX, 0L); - map.put(SHALLOW_ADVANCE, 0L); - map.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); - map.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); - map.put(COMPUTE_MAX_SCORE, 0L); - map.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); - map.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); - map.put(SET_MIN_COMPETITIVE_SCORE, 0L); - map.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); - map.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); - - Map breakdown = new HashMap<>(); - breakdown.put(CREATE_WEIGHT, 0L); - breakdown.put(CREATE_WEIGHT + COUNT_SUFFIX, 0L); - breakdown.put(CREATE_WEIGHT + START_TIME_SUFFIX, 0L); - breakdown.put(BUILD_SCORER, 9649L); - breakdown.put(BUILD_SCORER + COUNT_SUFFIX, 2L); - breakdown.put(BUILD_SCORER + START_TIME_SUFFIX, 1629732030749745L); - breakdown.put(NEXT_DOC, 1150L); - 
breakdown.put(NEXT_DOC + COUNT_SUFFIX, 2L); - breakdown.put(NEXT_DOC + START_TIME_SUFFIX, 1629732030806446L); - breakdown.put(ADVANCE, 920L); - breakdown.put(ADVANCE + COUNT_SUFFIX, 1L); - breakdown.put(ADVANCE + START_TIME_SUFFIX, 1629732030776129L); - breakdown.put(MATCH, 0L); - breakdown.put(MATCH + COUNT_SUFFIX, 0L); - breakdown.put(MATCH + START_TIME_SUFFIX, 0L); - breakdown.put(SCORE, 1050L); - breakdown.put(SCORE + COUNT_SUFFIX, 2L); - breakdown.put(SCORE + START_TIME_SUFFIX, 1629732030778977L); - breakdown.put(SHALLOW_ADVANCE, 0L); - breakdown.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); - breakdown.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); - breakdown.put(COMPUTE_MAX_SCORE, 0L); - breakdown.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); - breakdown.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); - breakdown.put(SET_MIN_COMPETITIVE_SCORE, 0L); - breakdown.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); - breakdown.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); - ConcurrentQueryProfileBreakdown profileBreakdown = new ConcurrentQueryProfileBreakdown(); - profileBreakdown.addMaxEndTimeAndMinStartTime(map, breakdown); - assertEquals(45, map.size()); - assertEquals( - "{set_min_competitive_score_count=0, match_count=0, score_start_time=0, shallow_advance_count=0, create_weight_start_time=1629732014278990, next_doc=0, compute_max_score_start_time=0, shallow_advance_min_start_time=0, score_count=2, compute_max_score_count=0, advance_start_time=0, advance=0, advance_count=1, compute_max_score_min_start_time=0, score=0, next_doc_max_end_time=1629732030807596, advance_max_end_time=1629732030777049, next_doc_start_time=0, shallow_advance=0, build_scorer_max_end_time=1629732030759394, create_weight_count=1, create_weight_max_end_time=1629732014480682, match_min_start_time=0, build_scorer=0, compute_max_score_max_end_time=0, next_doc_min_start_time=1629732030806446, set_min_competitive_score=0, set_min_competitive_score_start_time=0, match=0, 
set_min_competitive_score_max_end_time=0, match_start_time=0, shallow_advance_max_end_time=0, build_scorer_start_time=0, next_doc_count=2, shallow_advance_start_time=0, set_min_competitive_score_min_start_time=0, compute_max_score=0, create_weight_min_start_time=1629732014278990, build_scorer_count=2, create_weight=201692, score_min_start_time=1629732030778977, match_max_end_time=0, advance_min_start_time=1629732030776129, score_max_end_time=1629732030780027, build_scorer_min_start_time=1629732030749745}", - map.toString() - ); - } } diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java index a278e7004cc93..d41e564be272c 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java @@ -89,6 +89,10 @@ public class QueryProfilerTests extends OpenSearchTestCase { private IndexReader reader; private ContextIndexSearcher searcher; private ExecutorService executor; + private static final String MAX_PREFIX = "max_"; + private static final String MIN_PREFIX = "min_"; + private static final String AVG_PREFIX = "avg_"; + private static final String TIMING_TYPE_COUNT_SUFFIX = "_count"; @ParametersFactory public static Collection concurrency() { @@ -168,12 +172,45 @@ public void testBasic() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE.toString()), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.MATCH.toString()), equalTo(0L)); - assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L)); - 
assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + + if (results.get(0).getMaxSliceTime() != null) { + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.SCORE), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.SCORE), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.SCORE), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH), equalTo(0L)); + 
assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + } long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); @@ -194,12 +231,45 @@ public void testNoScoring() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE.toString()), equalTo(0L)); 
assertThat(breakdown.get(QueryTimingType.MATCH.toString()), equalTo(0L)); - assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L)); - assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), equalTo(0L)); - assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + + if (results.get(0).getMaxSliceTime() != null) { + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + 
QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.SCORE), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.SCORE), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.SCORE), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH + 
TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + } long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); @@ -234,12 +304,45 @@ public void testApproximations() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE.toString()), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH.toString()), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString() + "_count"), greaterThan(0L)); - assertThat(breakdown.get(QueryTimingType.ADVANCE.toString() + "_count"), equalTo(0L)); - assertThat(breakdown.get(QueryTimingType.SCORE.toString() + "_count"), equalTo(0L)); - assertThat(breakdown.get(QueryTimingType.MATCH.toString() + "_count"), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + + if (results.get(0).getMaxSliceTime() != null) { + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.NEXT_DOC), 
greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.NEXT_DOC), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.ADVANCE), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.SCORE), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.SCORE), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.SCORE), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.NEXT_DOC + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.ADVANCE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.SCORE + 
TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + } long rewriteTime = profiler.getRewriteTime(); assertThat(rewriteTime, greaterThan(0L)); diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index 739ba6fc15c76..9235ed5524eab 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -338,6 +338,14 @@ public void testMinScoreDisablesCountOptimization() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThanOrEqualTo(100L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(1L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThanOrEqualTo(100L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThanOrEqualTo(100L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThanOrEqualTo(100L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), equalTo(1L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), equalTo(1L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), equalTo(1L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); 
assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, collector -> { @@ -477,6 +485,14 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, collector -> { @@ -547,6 +563,14 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); 
assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, collector -> { @@ -585,6 +609,14 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); @@ -709,6 +741,14 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), greaterThan(0L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score_count"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score_count"), 
greaterThan(0L)); + assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + } assertThat(query.getProfiledChildren().get(1).getQueryName(), equalTo("TermQuery")); assertThat(query.getProfiledChildren().get(1).getTime(), greaterThan(0L)); @@ -716,6 +756,14 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), greaterThan(0L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score_count"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score_count"), greaterThan(0L)); + assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + } }, collector -> { assertThat(collector.getReason(), equalTo("search_terminate_after_count")); assertThat(collector.getTime(), greaterThan(0L)); @@ -1054,6 +1102,14 @@ public void testDisableTopScoreCollection() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + 
assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThan(0L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); }, collector -> { @@ -1133,6 +1189,14 @@ public void testMinScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(10L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), equalTo(10L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), equalTo(10L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), equalTo(10L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); @@ -1210,6 +1274,14 @@ public void testMaxScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThanOrEqualTo(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + 
assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThanOrEqualTo(6L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThanOrEqualTo(0L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThanOrEqualTo(1L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); @@ -1245,6 +1317,14 @@ public void testMaxScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThanOrEqualTo(6L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThanOrEqualTo(0L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThanOrEqualTo(1L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); @@ -1315,6 +1395,14 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + 
assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThanOrEqualTo(6L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThanOrEqualTo(6L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThanOrEqualTo(6L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren(), empty()); @@ -1342,6 +1430,14 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); + if (query.getMaxSliceTime() != null) { + assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); + assertThat(query.getTimeBreakdown().get("max_score_count"), greaterThanOrEqualTo(6L)); + assertThat(query.getTimeBreakdown().get("min_score_count"), greaterThanOrEqualTo(6L)); + assertThat(query.getTimeBreakdown().get("avg_score_count"), greaterThanOrEqualTo(6L)); + } assertThat(query.getTimeBreakdown().get("create_weight"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren(), empty()); From adc9b51621f723d14b3307edcc6657ebc805230b Mon Sep 17 00:00:00 2001 From: Sorabh Hamirwasia Date: Wed, 30 Aug 2023 14:32:19 -0700 Subject: [PATCH 3/9] Update collectorToLeaves mapping for children breakdowns post profile metric collection and before creating the results Signed-off-by: Sorabh Hamirwasia --- .../profile/ContextualProfileBreakdown.java | 5 +++ .../ConcurrentQueryProfileBreakdown.java | 10 +++++ .../query/ConcurrentQueryProfileTree.java | 41 +++++++++++++++++++ 3 files 
changed, 56 insertions(+) diff --git a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java index 143c7df7c1233..b193878aba4b5 100644 --- a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java @@ -11,6 +11,9 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Collector; +import java.util.List; +import java.util.Map; + /** * Provide contextual profile breakdowns which are associated with freestyle context. Used when concurrent * search over segments is activated and each collector needs own non-shareable profile breakdown instance. @@ -30,4 +33,6 @@ public ContextualProfileBreakdown(Class clazz) { public abstract AbstractProfileBreakdown context(Object context); public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collector) {} + + public void associateCollectorToLeaves(Map> collectorToLeaves) {} } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index f35f87bb320d5..cd009b872587c 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -14,6 +14,7 @@ import org.opensearch.search.profile.ContextualProfileBreakdown; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -228,4 +229,13 @@ public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collect collectorToLeaves.put(collectorManager, leaves); } } + + @Override + public void associateCollectorToLeaves(Map> collectorToLeaves) { + 
this.collectorToLeaves.putAll(collectorToLeaves); + } + + Map> getCollectorToLeaves() { + return Collections.unmodifiableMap(collectorToLeaves); + } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java index b9d4a5b3c9b45..b5560165e43c6 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java @@ -8,10 +8,12 @@ package org.opensearch.search.profile.query; +import org.apache.lucene.index.LeafReaderContext; import org.opensearch.search.profile.ContextualProfileBreakdown; import org.opensearch.search.profile.ProfileResult; import java.util.List; +import java.util.Map; /** * This class returns a list of {@link ProfileResult} that can be serialized back to the client in the concurrent execution. @@ -46,4 +48,43 @@ protected ProfileResult createProfileResult( concurrentBreakdown.getAvgSliceNodeTime() ); } + + /** + * For concurrent query case, when there are nested queries (with children), then the {@link ConcurrentQueryProfileBreakdown} created + * for the child queries weight doesn't have the association of collector to leaves. This is because child query weights are not + * exposed by the {@link org.apache.lucene.search.Weight} interface. So after all the collection is happened and before the result + * tree is created we need to pass the association from parent to the child breakdowns. 
This will be then used to create the + * breakdown map at slice level for the child queries as well + * + * @return a hierarchical representation of the profiled query tree + */ + @Override + public List getTree() { + for (Integer root : roots) { + final ContextualProfileBreakdown parentBreakdown = breakdowns.get(root); + assert parentBreakdown instanceof ConcurrentQueryProfileBreakdown; + final Map> parentCollectorToLeaves = ((ConcurrentQueryProfileBreakdown) parentBreakdown) + .getCollectorToLeaves(); + // update all the children with the parent collectorToLeaves association + updateCollectorToLeavesForChildBreakdowns(root, parentCollectorToLeaves); + } + // once the collector to leaves mapping is updated, get the result + return super.getTree(); + } + + /** + * Updates the children with collector to leaves mapping as recorded by parent breakdown + * @param parentToken parent token number in the tree + * @param collectorToLeaves collector to leaves mapping recorded by parent + */ + private void updateCollectorToLeavesForChildBreakdowns(Integer parentToken, Map> collectorToLeaves) { + final List children = tree.get(parentToken); + if (children != null) { + for (Integer currentChild : children) { + final ContextualProfileBreakdown currentChildBreakdown = breakdowns.get(currentChild); + currentChildBreakdown.associateCollectorToLeaves(collectorToLeaves); + updateCollectorToLeavesForChildBreakdowns(currentChild, collectorToLeaves); + } + } + } } From 0868b20722746a60055c31fec4ec48d93edcbc9a Mon Sep 17 00:00:00 2001 From: Sorabh Hamirwasia Date: Wed, 30 Aug 2023 18:08:02 -0700 Subject: [PATCH 4/9] Refactor logic to compute the slice level breakdown stats and query level breakdown stats for concurrent search case Signed-off-by: Sorabh Hamirwasia --- .../profile/ContextualProfileBreakdown.java | 4 +- .../ConcurrentQueryProfileBreakdown.java | 327 ++++++++++-------- .../query/ConcurrentQueryProfileTree.java | 7 +- .../search/profile/query/ProfileWeight.java | 2 +- 4 
files changed, 182 insertions(+), 158 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java index b193878aba4b5..94ac8683fe2b9 100644 --- a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java @@ -32,7 +32,7 @@ public ContextualProfileBreakdown(Class clazz) { */ public abstract AbstractProfileBreakdown context(Object context); - public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collector) {} + public void associateCollectorToLeaves(Collector collector, LeafReaderContext leaf) {} - public void associateCollectorToLeaves(Map> collectorToLeaves) {} + public void associateCollectorToLeaves(Map> collectorToLeaves) {} } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index cd009b872587c..0cd085b79f166 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -28,19 +28,22 @@ * @opensearch.internal */ public final class ConcurrentQueryProfileBreakdown extends ContextualProfileBreakdown { - public static final String MAX_END_TIME_SUFFIX = "_max_end_time"; - public static final String MIN_START_TIME_SUFFIX = "_min_start_time"; + private static final String SLICE_END_TIME_SUFFIX = "_slice_end_time"; + private static final String SLICE_START_TIME_SUFFIX = "_slice_start_time"; private static final String MAX_PREFIX = "max_"; private static final String MIN_PREFIX = "min_"; private static final String AVG_PREFIX = "avg_"; - private long breakdownMaxEndTime = Long.MIN_VALUE; - private long 
breakdownMinStartTime = Long.MAX_VALUE; + private long queryEndTime = Long.MIN_VALUE; + private long queryStartTime = Long.MAX_VALUE; private long maxSliceNodeTime = Long.MIN_VALUE; private long minSliceNodeTime = Long.MAX_VALUE; private long avgSliceNodeTime = 0L; - private long nodeTimeSum = 0L; + + // keep track of all breakdown timings per segment private final Map> contexts = new ConcurrentHashMap<>(); - private Map> collectorToLeaves = new HashMap<>(); + + // represents slice to leaves mapping as for each slice a unique collector instance is created + private final Map> sliceCollectorToLeaves = new HashMap<>(); /** Sole constructor. */ public ConcurrentQueryProfileBreakdown() { @@ -61,181 +64,201 @@ public AbstractProfileBreakdown context(Object context) { @Override public Map toBreakdownMap() { - Map> sliceLevelBreakdown = new HashMap<>(); - if (collectorToLeaves == null || collectorToLeaves.isEmpty() || contexts.isEmpty()) { - return new HashMap<>(super.toBreakdownMap()); + if (sliceCollectorToLeaves.isEmpty()) { + return super.toBreakdownMap(); } - for (Map.Entry> slice : collectorToLeaves.entrySet()) { - String sliceCollector = slice.getKey(); - List leaves = slice.getValue(); - long lastLeafEndTime = 0L; - for (LeafReaderContext leaf : leaves) { - long currentLeafEndTime = 0L; - Map currentLeafBreakdownMap = contexts.get(leaf).toBreakdownMap(); - for (Map.Entry breakDownEntry : currentLeafBreakdownMap.entrySet()) { - Map currentSliceBreakdown = sliceLevelBreakdown.getOrDefault( - sliceCollector, - new HashMap<>(super.toBreakdownMap()) - ); - String breakdownType = breakDownEntry.getKey(); - Long breakdownValue = breakDownEntry.getValue(); - // Adding breakdown type count - if (breakdownType.contains(TIMING_TYPE_COUNT_SUFFIX)) { - currentSliceBreakdown.merge(breakdownType, breakdownValue, Long::sum); - } - if (breakdownType.contains(TIMING_TYPE_START_TIME_SUFFIX)) { - // Finding the earliest start time and the last end time for each breakdown types within 
the current slice to - // compute the total breakdown time - String method = breakdownType.split(TIMING_TYPE_START_TIME_SUFFIX)[0]; - String maxEndTimeKey = method + MAX_END_TIME_SUFFIX; - String minStartTimeKey = method + MIN_START_TIME_SUFFIX; - long maxEndTime = Math.max( - currentSliceBreakdown.getOrDefault(maxEndTimeKey, Long.MIN_VALUE), - breakdownValue + currentLeafBreakdownMap.get(method) - ); - long minStartTime = Math.min(currentSliceBreakdown.getOrDefault(minStartTimeKey, Long.MAX_VALUE), breakdownValue); - currentSliceBreakdown.put(maxEndTimeKey, maxEndTime); - currentSliceBreakdown.put(minStartTimeKey, minStartTime); - // Finding the current leaf end time to compute the last leaf end time - currentLeafEndTime = Math.max(currentLeafEndTime, maxEndTime); - } - sliceLevelBreakdown.put(sliceCollector, currentSliceBreakdown); - } - lastLeafEndTime = Math.max(lastLeafEndTime, currentLeafEndTime); - } - // Computing breakdown type total time - for (QueryTimingType queryTimingType : QueryTimingType.values()) { - String timingType = queryTimingType.toString(); - if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { - long currentSliceMaxEndTime = sliceLevelBreakdown.get(sliceCollector).get(timingType + MAX_END_TIME_SUFFIX); - long currentSliceMinStartTime = sliceLevelBreakdown.get(sliceCollector).get(timingType + MIN_START_TIME_SUFFIX); - sliceLevelBreakdown.get(sliceCollector).put(timingType, currentSliceMaxEndTime - currentSliceMinStartTime); - } - } - // Computing slice level node time and stats - long createWeightStartTime = sliceLevelBreakdown.get(sliceCollector) - .get(QueryTimingType.CREATE_WEIGHT + AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX); - breakdownMaxEndTime = Math.max(breakdownMaxEndTime, lastLeafEndTime); - breakdownMinStartTime = createWeightStartTime; - long currentSliceNodeTime = lastLeafEndTime - createWeightStartTime; - maxSliceNodeTime = Math.max(maxSliceNodeTime, currentSliceNodeTime); - minSliceNodeTime = 
Math.min(minSliceNodeTime, currentSliceNodeTime); - nodeTimeSum += currentSliceNodeTime; - } - // Computing avg slice node time - avgSliceNodeTime = nodeTimeSum / collectorToLeaves.size(); - return buildFinalBreakdownMap(sliceLevelBreakdown); + + final Map topLevelBreakdownMapWithWeightTime = super.toBreakdownMap(); + final long createWeightStartTime = topLevelBreakdownMapWithWeightTime.get( + QueryTimingType.CREATE_WEIGHT + TIMING_TYPE_START_TIME_SUFFIX + ); + queryStartTime = createWeightStartTime; + // first create the slice level breakdowns + final Map> sliceLevelBreakdowns = buildSliceLevelBreakdown(createWeightStartTime); + return buildQueryBreakdownMap(sliceLevelBreakdowns, createWeightStartTime); } /** - * This method is used to construct the final query profile breakdown map for return. - * - * @param sliceLevelBreakdown a timing count breakdown map with the max end time and min start time information + * Computes the slice level breakdownMap. It uses sliceCollectorToLeaves to figure out all the leaves or segments part of a slice. + * Then use the breakdown timing stats for each of these leaves to calculate the breakdown stats at slice level. 
+ * @param createWeightStartTime start time when createWeight is called + * @return map of collector (or slice) to breakdown map */ - Map buildFinalBreakdownMap(Map> sliceLevelBreakdown) { - final Map breakdownMap = new HashMap<>(); - final Map totalTimeMap = new HashMap<>(); - int sliceCount = sliceLevelBreakdown.size(); - for (Map.Entry> slice : sliceLevelBreakdown.entrySet()) { - for (QueryTimingType queryTimingType : QueryTimingType.values()) { - String timingType = queryTimingType.toString(); - // Computing the time/count stats for each breakdown across all slices - if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { - String maxBreakdownTypeTime = MAX_PREFIX + timingType; - String minBreakdownTypeTime = MIN_PREFIX + timingType; - String avgBreakdownTypeTime = AVG_PREFIX + timingType; - String maxBreakdownTypeCount = maxBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; - String minBreakdownTypeCount = minBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; - String avgBreakdownTypeCount = avgBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; - Long breakdownTime = slice.getValue().get(timingType); - Long breakdownCount = slice.getValue().get(timingType + TIMING_TYPE_COUNT_SUFFIX); - breakdownMap.put( - maxBreakdownTypeTime, - Math.max(breakdownMap.getOrDefault(maxBreakdownTypeTime, Long.MIN_VALUE), breakdownTime) - ); - breakdownMap.put( - minBreakdownTypeTime, - Math.min(breakdownMap.getOrDefault(minBreakdownTypeTime, Long.MAX_VALUE), breakdownTime) + private Map> buildSliceLevelBreakdown(long createWeightStartTime) { + final Map> sliceLevelBreakdowns = new HashMap<>(); + long totalSliceNodeTime = 0; + for (Map.Entry> slice : sliceCollectorToLeaves.entrySet()) { + final Collector sliceCollector = slice.getKey(); + // initialize each slice level breakdown + final Map currentSliceBreakdown = sliceLevelBreakdowns.computeIfAbsent(sliceCollector, k -> new HashMap<>()); + // max slice end time across all timing types + long sliceMaxEndTime = Long.MIN_VALUE; + for 
(QueryTimingType timingType : QueryTimingType.values()) { + if (timingType.equals(QueryTimingType.CREATE_WEIGHT)) { + // do nothing for create weight as that is query level time and not slice level + continue; + } + + // for each timing type compute maxSliceEndTime and minSliceStartTime. Also add the counts of timing type to + // compute total count at slice level + final String timingTypeCountKey = timingType + TIMING_TYPE_COUNT_SUFFIX; + final String timingTypeStartKey = timingType + TIMING_TYPE_START_TIME_SUFFIX; + final String timingTypeSliceStartTimeKey = timingType + SLICE_START_TIME_SUFFIX; + final String timingTypeSliceEndTimeKey = timingType + SLICE_END_TIME_SUFFIX; + + for (LeafReaderContext sliceLeaf : slice.getValue()) { + final Map currentSliceLeafBreakdownMap = contexts.get(sliceLeaf).toBreakdownMap(); + // get the count for current leaf timing type + currentSliceBreakdown.compute( + timingTypeCountKey, + (key, value) -> (value == null) ? 0 : value + currentSliceLeafBreakdownMap.get(timingTypeCountKey) ); - breakdownMap.put(avgBreakdownTypeTime, breakdownMap.getOrDefault(avgBreakdownTypeTime, 0L) + breakdownTime); - breakdownMap.put( - maxBreakdownTypeCount, - Math.max(breakdownMap.getOrDefault(maxBreakdownTypeCount, Long.MIN_VALUE), breakdownCount) + + // compute the sliceEndTime for timingType using max of endTime across slice leaves + final long sliceLeafTimingTypeEndTime = currentSliceLeafBreakdownMap.get(timingTypeStartKey) + + currentSliceLeafBreakdownMap.get(timingType.toString()); + currentSliceBreakdown.compute( + timingTypeSliceEndTimeKey, + (key, value) -> (value == null) ? 
sliceLeafTimingTypeEndTime : Math.max(value, sliceLeafTimingTypeEndTime) ); - breakdownMap.put( - minBreakdownTypeCount, - Math.min(breakdownMap.getOrDefault(minBreakdownTypeCount, Long.MAX_VALUE), breakdownCount) + + // compute the sliceStartTime for timingType using min of startTime across slice leaves + final long sliceLeafTimingTypeStartTime = currentSliceLeafBreakdownMap.get(timingTypeStartKey); + currentSliceBreakdown.compute( + timingTypeSliceStartTimeKey, + (key, value) -> (value == null) ? sliceLeafTimingTypeStartTime : Math.min(value, sliceLeafTimingTypeStartTime) ); - breakdownMap.put(avgBreakdownTypeCount, breakdownMap.getOrDefault(avgBreakdownTypeCount, 0L) + breakdownCount); - } - // Finding the max slice end time and min slice start time to compute total time - String maxEndTime = timingType + MAX_END_TIME_SUFFIX; - String minStartTime = timingType + MIN_START_TIME_SUFFIX; - Long maxEndTimeValue = slice.getValue().get(timingType + MAX_END_TIME_SUFFIX); - Long minStartTimeValue = slice.getValue().get(timingType + MIN_START_TIME_SUFFIX); - totalTimeMap.put(maxEndTime, Math.max(totalTimeMap.getOrDefault(maxEndTime, Long.MIN_VALUE), maxEndTimeValue)); - totalTimeMap.put(minStartTime, Math.min(totalTimeMap.getOrDefault(minStartTime, Long.MAX_VALUE), minStartTimeValue)); - // Computing the total count for each breakdown across all slices - String breakdownCount = timingType + TIMING_TYPE_COUNT_SUFFIX; - Long breakdownCountValue = slice.getValue().get(breakdownCount); - if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { - breakdownMap.put(breakdownCount, breakdownMap.getOrDefault(breakdownCount, 0L) + breakdownCountValue); - } else { - breakdownMap.put(breakdownCount, 1L); - breakdownMap.put(timingType, slice.getValue().get(timingType)); } + // compute sliceMaxEndTime as max of sliceEndTime across all timing types + sliceMaxEndTime = Math.max(sliceMaxEndTime, currentSliceBreakdown.get(timingTypeSliceEndTimeKey)); + // compute total time for each timing 
type at slice level using sliceEndTime and sliceStartTime + currentSliceBreakdown.put( + timingType.toString(), + currentSliceBreakdown.get(timingTypeSliceEndTimeKey) - currentSliceBreakdown.get(timingTypeSliceStartTimeKey) + ); } + // currentSliceNodeTime includes the create weight time as well which will be same for all the slices + long currentSliceNodeTime = sliceMaxEndTime - createWeightStartTime; + // compute max/min slice times + maxSliceNodeTime = Math.max(maxSliceNodeTime, currentSliceNodeTime); + minSliceNodeTime = Math.min(minSliceNodeTime, currentSliceNodeTime); + // total time at query level + totalSliceNodeTime += currentSliceNodeTime; } + avgSliceNodeTime = totalSliceNodeTime / sliceCollectorToLeaves.size(); + return sliceLevelBreakdowns; + } + + /** + * Computes the query level breakdownMap using the breakdown maps of all the slices. In query level breakdown map, it has the + * time/count stats for each breakdown type. Total time per breakdown type at query level is computed by subtracting the max of slice + * end time with min of slice start time for that type. Count for each breakdown type at query level is sum of count of that type + * across slices. 
Other than these, there are max/min/avg stats across slices for each breakdown type + * @param sliceLevelBreakdowns breakdown map for all the slices + * @param createWeightStartTime start time of create weight + * @return breakdown map for entire query + */ + private Map buildQueryBreakdownMap(Map> sliceLevelBreakdowns, long createWeightStartTime) { + final Map queryBreakdownMap = new HashMap<>(); for (QueryTimingType queryTimingType : QueryTimingType.values()) { - String timingType = queryTimingType.toString(); - if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { - // Computing avg time/count stats - String avgBreakdownTypeTime = AVG_PREFIX + timingType; - String avgBreakdownTypeCount = avgBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; - breakdownMap.put(avgBreakdownTypeTime, breakdownMap.get(avgBreakdownTypeTime) / sliceCount); - breakdownMap.put(avgBreakdownTypeCount, breakdownMap.get(avgBreakdownTypeCount) / sliceCount); - // Computing total time for each breakdown including any wait time - Long maxSliceEndTime = totalTimeMap.get(timingType + MAX_END_TIME_SUFFIX); - Long mingSliceStartTime = totalTimeMap.get(timingType + MIN_START_TIME_SUFFIX); - breakdownMap.put(timingType, maxSliceEndTime - mingSliceStartTime); + final String timingTypeKey = queryTimingType.toString(); + final String timingTypeCountKey = timingTypeKey + TIMING_TYPE_COUNT_SUFFIX; + final String sliceEndTimeForTimingType = timingTypeKey + SLICE_END_TIME_SUFFIX; + final String sliceStartTimeForTimingType = timingTypeKey + SLICE_START_TIME_SUFFIX; + + final String maxBreakdownTypeTime = MAX_PREFIX + timingTypeKey; + final String minBreakdownTypeTime = MIN_PREFIX + timingTypeKey; + final String avgBreakdownTypeTime = AVG_PREFIX + timingTypeKey; + final String maxBreakdownTypeCount = maxBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; + final String minBreakdownTypeCount = minBreakdownTypeTime + TIMING_TYPE_COUNT_SUFFIX; + final String avgBreakdownTypeCount = avgBreakdownTypeTime + 
TIMING_TYPE_COUNT_SUFFIX; + + long queryTimingTypeEndTime = Long.MIN_VALUE; + long queryTimingTypeStartTime = Long.MAX_VALUE; + long queryTimingTypeCount = 0L; + + // the create weight time is computed at the query level and is called only once per query + if (queryTimingType == QueryTimingType.CREATE_WEIGHT) { + queryBreakdownMap.put(timingTypeCountKey, 1L); + queryBreakdownMap.put(timingTypeKey, createWeightStartTime); + continue; } + + // for all other timing types, we will compute min/max/avg/total across slices + for (Map.Entry> sliceBreakdown : sliceLevelBreakdowns.entrySet()) { + Long sliceBreakdownTypeTime = sliceBreakdown.getValue().get(timingTypeKey); + Long sliceBreakdownTypeCount = sliceBreakdown.getValue().get(timingTypeCountKey); + // compute max/min/avg TimingType time across slices + queryBreakdownMap.compute( + maxBreakdownTypeTime, + (key, value) -> (value == null) ? sliceBreakdownTypeTime : Math.max(sliceBreakdownTypeTime, value) + ); + queryBreakdownMap.compute( + minBreakdownTypeTime, + (key, value) -> (value == null) ? sliceBreakdownTypeTime : Math.min(sliceBreakdownTypeTime, value) + ); + queryBreakdownMap.compute( + avgBreakdownTypeTime, + (key, value) -> (value == null) ? sliceBreakdownTypeTime : sliceBreakdownTypeTime + value + ); + + // compute max/min/avg TimingType count across slices + queryBreakdownMap.compute( + maxBreakdownTypeCount, + (key, value) -> (value == null) ? sliceBreakdownTypeCount : Math.max(sliceBreakdownTypeCount, value) + ); + queryBreakdownMap.compute( + minBreakdownTypeCount, + (key, value) -> (value == null) ? sliceBreakdownTypeCount : Math.min(sliceBreakdownTypeCount, value) + ); + queryBreakdownMap.compute( + avgBreakdownTypeCount, + (key, value) -> (value == null) ? 
sliceBreakdownTypeCount : sliceBreakdownTypeCount + value + ); + + // query start/end time for a TimingType is min/max of start/end time across slices for that TimingType + queryTimingTypeEndTime = Math.max(queryTimingTypeEndTime, sliceBreakdown.getValue().get(sliceEndTimeForTimingType)); + queryTimingTypeStartTime = Math.min(queryTimingTypeStartTime, sliceBreakdown.getValue().get(sliceStartTimeForTimingType)); + queryTimingTypeCount += sliceBreakdownTypeCount; + } + queryBreakdownMap.put(timingTypeKey, queryTimingTypeEndTime - queryTimingTypeStartTime); + queryBreakdownMap.put(timingTypeCountKey, queryTimingTypeCount); + queryBreakdownMap.compute(avgBreakdownTypeTime, (key, value) -> (value == null) ? 0 : value / sliceLevelBreakdowns.size()); + queryBreakdownMap.compute(avgBreakdownTypeCount, (key, value) -> (value == null) ? 0 : value / sliceLevelBreakdowns.size()); + // compute query end time using max of query end time across all timing types + queryEndTime = Math.max(queryEndTime, queryTimingTypeEndTime); } - return breakdownMap; + return queryBreakdownMap; } @Override public long toNodeTime() { - return breakdownMaxEndTime - breakdownMinStartTime; + return queryEndTime - queryStartTime; } - public Long getMaxSliceNodeTime() { - return maxSliceNodeTime; + @Override + public void associateCollectorToLeaves(Collector collector, LeafReaderContext leaf) { + sliceCollectorToLeaves.computeIfAbsent(collector, k -> new ArrayList<>()).add(leaf); } - public Long getMinSliceNodeTime() { - return minSliceNodeTime; + @Override + public void associateCollectorToLeaves(Map> collectorToLeaves) { + this.sliceCollectorToLeaves.putAll(collectorToLeaves); } - public Long getAvgSliceNodeTime() { - return avgSliceNodeTime; + Map> getSliceCollectorToLeaves() { + return Collections.unmodifiableMap(sliceCollectorToLeaves); } - @Override - public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collector) { - if (collector != null) { - String collectorManager = 
collector.toString(); - List leaves = collectorToLeaves.getOrDefault(collectorManager, new ArrayList<>()); - leaves.add(leaf); - collectorToLeaves.put(collectorManager, leaves); - } + Long getMaxSliceNodeTime() { + return maxSliceNodeTime; } - @Override - public void associateCollectorToLeaves(Map> collectorToLeaves) { - this.collectorToLeaves.putAll(collectorToLeaves); + Long getMinSliceNodeTime() { + return minSliceNodeTime; } - Map> getCollectorToLeaves() { - return Collections.unmodifiableMap(collectorToLeaves); + Long getAvgSliceNodeTime() { + return avgSliceNodeTime; } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java index b5560165e43c6..8b3d605685a47 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java @@ -9,6 +9,7 @@ package org.opensearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Collector; import org.opensearch.search.profile.ContextualProfileBreakdown; import org.opensearch.search.profile.ProfileResult; @@ -63,8 +64,8 @@ public List getTree() { for (Integer root : roots) { final ContextualProfileBreakdown parentBreakdown = breakdowns.get(root); assert parentBreakdown instanceof ConcurrentQueryProfileBreakdown; - final Map> parentCollectorToLeaves = ((ConcurrentQueryProfileBreakdown) parentBreakdown) - .getCollectorToLeaves(); + final Map> parentCollectorToLeaves = ((ConcurrentQueryProfileBreakdown) parentBreakdown) + .getSliceCollectorToLeaves(); // update all the children with the parent collectorToLeaves association updateCollectorToLeavesForChildBreakdowns(root, parentCollectorToLeaves); } @@ -77,7 +78,7 @@ public List getTree() { * @param parentToken parent token number in the tree * @param 
collectorToLeaves collector to leaves mapping recorded by parent */ - private void updateCollectorToLeavesForChildBreakdowns(Integer parentToken, Map> collectorToLeaves) { + private void updateCollectorToLeavesForChildBreakdowns(Integer parentToken, Map> collectorToLeaves) { final List children = tree.get(parentToken); if (children != null) { for (Integer currentChild : children) { diff --git a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java index 0cdacdbeb46d9..c7e70d8d88007 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ProfileWeight.java @@ -139,6 +139,6 @@ public boolean isCacheable(LeafReaderContext ctx) { } public void associateCollectorToLeaves(LeafReaderContext leaf, Collector collector) { - profile.associateCollectorToLeaves(leaf, collector); + profile.associateCollectorToLeaves(collector, leaf); } } From b5af44ec10904538776d41d34967efcf1e16b758 Mon Sep 17 00:00:00 2001 From: Ticheng Lin Date: Thu, 31 Aug 2023 12:09:21 -0700 Subject: [PATCH 5/9] Fix QueryProfilePhaseTests and QueryProfileTests, and parameterize QueryProfilerIT with concurrent search enabled Signed-off-by: Ticheng Lin --- .../search/profile/query/QueryProfilerIT.java | 25 ++++++++++- .../ConcurrentQueryProfileBreakdown.java | 15 +++++-- .../profile/query/QueryProfilerTests.java | 41 ++++++++++++++++--- .../search/query/QueryProfilePhaseTests.java | 26 ++++++------ 4 files changed, 84 insertions(+), 23 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java index 5f794d2abf878..39966888fdff4 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java +++ 
b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java @@ -32,6 +32,8 @@ package org.opensearch.search.profile.query; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.apache.lucene.tests.util.English; import org.opensearch.action.index.IndexRequestBuilder; import org.opensearch.action.search.MultiSearchResponse; @@ -40,20 +42,23 @@ import org.opensearch.action.search.SearchType; import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.index.query.QueryBuilder; import org.opensearch.index.query.QueryBuilders; import org.opensearch.search.SearchHit; import org.opensearch.search.profile.ProfileResult; import org.opensearch.search.profile.ProfileShardResult; import org.opensearch.search.sort.SortOrder; -import org.opensearch.test.OpenSearchIntegTestCase; +import org.opensearch.test.ParameterizedOpenSearchIntegTestCase; import java.util.Arrays; +import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import static org.opensearch.search.SearchService.CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING; import static org.opensearch.search.profile.query.RandomQueryGenerator.randomQueryBuilder; import static org.hamcrest.Matchers.emptyOrNullString; import static org.hamcrest.Matchers.equalTo; @@ -62,7 +67,23 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -public class QueryProfilerIT extends OpenSearchIntegTestCase { +public class QueryProfilerIT extends ParameterizedOpenSearchIntegTestCase { + public QueryProfilerIT(Settings settings) { + super(settings); + } + + @ParametersFactory + public static Collection parameters() { + return Arrays.asList( + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, + new Object[] { 
Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + ); + } + + @Override + protected Settings featureFlagSettings() { + return Settings.builder().put(super.featureFlagSettings()).put(FeatureFlags.CONCURRENT_SEGMENT_SEARCH, "true").build(); + } /** * This test simply checks to make sure nothing crashes. Test indexes 100-150 documents, diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index 0cd085b79f166..82ba2ba3c8def 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -64,10 +64,10 @@ public AbstractProfileBreakdown context(Object context) { @Override public Map toBreakdownMap() { - if (sliceCollectorToLeaves.isEmpty()) { + if (sliceCollectorToLeaves.isEmpty() || contexts.isEmpty()) { + // If there are no leaf contexts, then return the top level breakdown, which will include the create_weight time/count. return super.toBreakdownMap(); } - final Map topLevelBreakdownMapWithWeightTime = super.toBreakdownMap(); final long createWeightStartTime = topLevelBreakdownMapWithWeightTime.get( QueryTimingType.CREATE_WEIGHT + TIMING_TYPE_START_TIME_SUFFIX @@ -107,11 +107,20 @@ private Map> buildSliceLevelBreakdown(long createWe final String timingTypeSliceEndTimeKey = timingType + SLICE_END_TIME_SUFFIX; for (LeafReaderContext sliceLeaf : slice.getValue()) { + if (!contexts.containsKey(sliceLeaf)) { + // In case like early termination, the sliceCollectorToLeave association will be added for a + // leaf, but the leaf level breakdown will not be created in the contexts map. + // This is because before updating the contexts map, the query hits earlyTerminationException. 
+ // To handle such case, we will ignore the leaf that is not present. + continue; + } final Map currentSliceLeafBreakdownMap = contexts.get(sliceLeaf).toBreakdownMap(); // get the count for current leaf timing type currentSliceBreakdown.compute( timingTypeCountKey, - (key, value) -> (value == null) ? 0 : value + currentSliceLeafBreakdownMap.get(timingTypeCountKey) + (key, value) -> (value == null) + ? currentSliceLeafBreakdownMap.get(timingTypeCountKey) + : value + currentSliceLeafBreakdownMap.get(timingTypeCountKey) ); // compute the sliceEndTime for timingType using max of endTime across slice leaves diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java index d41e564be272c..e3e2f8b851b94 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java @@ -81,10 +81,14 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class QueryProfilerTests extends OpenSearchTestCase { + private final int concurrency; private Directory dir; private IndexReader reader; private ContextIndexSearcher searcher; @@ -101,6 +105,7 @@ public static Collection concurrency() { public QueryProfilerTests(int concurrency) { this.executor = (concurrency > 0) ? 
Executors.newFixedThreadPool(concurrency) : null; + this.concurrency = concurrency; } @Before @@ -164,7 +169,8 @@ public void testBasic() throws IOException { searcher.search(query, 1); List results = profiler.getTree(); assertEquals(1, results.size()); - Map breakdown = results.get(0).getTimeBreakdown(); + ProfileResult profileResult = results.get(0); + Map breakdown = profileResult.getTimeBreakdown(); assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L)); @@ -179,7 +185,10 @@ public void testBasic() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); - if (results.get(0).getMaxSliceTime() != null) { + if (concurrency > 0) { + assertThat(profileResult.getMaxSliceTime(), is(not(nullValue()))); + assertThat(profileResult.getMinSliceTime(), is(not(nullValue()))); + assertThat(profileResult.getAvgSliceTime(), is(not(nullValue()))); assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); @@ -210,6 +219,10 @@ public void testBasic() throws IOException { assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + } else { + assertThat(profileResult.getMaxSliceTime(), is(nullValue())); + assertThat(profileResult.getMinSliceTime(), is(nullValue())); + 
assertThat(profileResult.getAvgSliceTime(), is(nullValue())); } long rewriteTime = profiler.getRewriteTime(); @@ -223,7 +236,8 @@ public void testNoScoring() throws IOException { searcher.search(query, 1, Sort.INDEXORDER); // scores are not needed List results = profiler.getTree(); assertEquals(1, results.size()); - Map breakdown = results.get(0).getTimeBreakdown(); + ProfileResult profileResult = results.get(0); + Map breakdown = profileResult.getTimeBreakdown(); assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L)); @@ -238,7 +252,10 @@ public void testNoScoring() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); - if (results.get(0).getMaxSliceTime() != null) { + if (concurrency > 0) { + assertThat(profileResult.getMaxSliceTime(), is(not(nullValue()))); + assertThat(profileResult.getMinSliceTime(), is(not(nullValue()))); + assertThat(profileResult.getAvgSliceTime(), is(not(nullValue()))); assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); @@ -269,6 +286,10 @@ public void testNoScoring() throws IOException { assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); + } else { + assertThat(profileResult.getMaxSliceTime(), is(nullValue())); + 
assertThat(profileResult.getMinSliceTime(), is(nullValue())); + assertThat(profileResult.getAvgSliceTime(), is(nullValue())); } long rewriteTime = profiler.getRewriteTime(); @@ -296,7 +317,8 @@ public void testApproximations() throws IOException { searcher.count(query); List results = profiler.getTree(); assertEquals(1, results.size()); - Map breakdown = results.get(0).getTimeBreakdown(); + ProfileResult profileResult = results.get(0); + Map breakdown = profileResult.getTimeBreakdown(); assertThat(breakdown.get(QueryTimingType.CREATE_WEIGHT.toString()), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.BUILD_SCORER.toString()), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.NEXT_DOC.toString()), greaterThan(0L)); @@ -311,7 +333,10 @@ public void testApproximations() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); - if (results.get(0).getMaxSliceTime() != null) { + if (concurrency > 0) { + assertThat(profileResult.getMaxSliceTime(), is(not(nullValue()))); + assertThat(profileResult.getMinSliceTime(), is(not(nullValue()))); + assertThat(profileResult.getAvgSliceTime(), is(not(nullValue()))); assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.BUILD_SCORER), greaterThan(0L)); @@ -342,6 +367,10 @@ public void testApproximations() throws IOException { assertThat(breakdown.get(MAX_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); assertThat(breakdown.get(MIN_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); assertThat(breakdown.get(AVG_PREFIX + QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); + } else { + assertThat(profileResult.getMaxSliceTime(), 
is(nullValue())); + assertThat(profileResult.getMinSliceTime(), is(nullValue())); + assertThat(profileResult.getAvgSliceTime(), is(nullValue())); } long rewriteTime = profiler.getRewriteTime(); diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index 9235ed5524eab..42f77ebf2d998 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -97,6 +97,7 @@ import static org.mockito.Mockito.when; public class QueryProfilePhaseTests extends IndexShardTestCase { + private final int concurrency; private IndexShard indexShard; private final ExecutorService executor; private final QueryPhaseSearcher queryPhaseSearcher; @@ -112,6 +113,7 @@ public static Collection concurrency() { public QueryProfilePhaseTests(int concurrency, QueryPhaseSearcher queryPhaseSearcher) { this.executor = (concurrency > 0) ? 
Executors.newFixedThreadPool(concurrency) : null; this.queryPhaseSearcher = queryPhaseSearcher; + this.concurrency = concurrency; } @Override @@ -338,7 +340,7 @@ public void testMinScoreDisablesCountOptimization() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThanOrEqualTo(100L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(1L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThanOrEqualTo(100L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThanOrEqualTo(100L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThanOrEqualTo(100L)); @@ -485,7 +487,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -563,7 +565,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -609,7 +611,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { 
assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -741,7 +743,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -756,7 +758,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score"), 
greaterThan(0L)); @@ -1102,7 +1104,7 @@ public void testDisableTopScoreCollection() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1189,7 +1191,7 @@ public void testMinScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(10L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1274,7 +1276,7 @@ public void testMaxScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThanOrEqualTo(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1317,7 +1319,7 @@ public void testMaxScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if 
(query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1395,7 +1397,7 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1430,7 +1432,7 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if (query.getMaxSliceTime() != null) { + if (concurrency > 0) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); From c9fcca0dc84bc0d9f435cad9f08e5405f59d67a4 Mon Sep 17 00:00:00 2001 From: Sorabh Hamirwasia Date: Thu, 31 Aug 2023 12:28:54 -0700 Subject: [PATCH 6/9] Handle the case when there are no leaf context to compute the profile stats to return default stats for all breakdown type along with min/max/avg values. 
Replace queryStart and queryEnd time with queryNodeTime Signed-off-by: Sorabh Hamirwasia --- .../ConcurrentQueryProfileBreakdown.java | 83 ++++++++++--- .../ConcurrentQueryProfileBreakdownTests.java | 110 +++++++++--------- 2 files changed, 122 insertions(+), 71 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index 82ba2ba3c8def..49309df32d3c0 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -33,8 +33,7 @@ public final class ConcurrentQueryProfileBreakdown extends ContextualProfileBrea private static final String MAX_PREFIX = "max_"; private static final String MIN_PREFIX = "min_"; private static final String AVG_PREFIX = "avg_"; - private long queryEndTime = Long.MIN_VALUE; - private long queryStartTime = Long.MAX_VALUE; + private long queryNodeTime = Long.MIN_VALUE; private long maxSliceNodeTime = Long.MIN_VALUE; private long minSliceNodeTime = Long.MAX_VALUE; private long avgSliceNodeTime = 0L; @@ -64,18 +63,62 @@ public AbstractProfileBreakdown context(Object context) { @Override public Map toBreakdownMap() { - if (sliceCollectorToLeaves.isEmpty() || contexts.isEmpty()) { - // If there are no leaf contexts, then return the top level breakdown, which will include the create_weight time/count. 
- return super.toBreakdownMap(); - } final Map topLevelBreakdownMapWithWeightTime = super.toBreakdownMap(); final long createWeightStartTime = topLevelBreakdownMapWithWeightTime.get( QueryTimingType.CREATE_WEIGHT + TIMING_TYPE_START_TIME_SUFFIX ); - queryStartTime = createWeightStartTime; + final long createWeightTime = topLevelBreakdownMapWithWeightTime.get(QueryTimingType.CREATE_WEIGHT.toString()); + + if (sliceCollectorToLeaves.isEmpty() || contexts.isEmpty()) { + // If there are no leaf contexts, then return the default concurrent query level breakdown, which will include the + // create_weight time/count + queryNodeTime = createWeightTime; + maxSliceNodeTime = queryNodeTime; + minSliceNodeTime = queryNodeTime; + avgSliceNodeTime = queryNodeTime; + return buildDefaultQueryBreakdownMap(createWeightTime); + } + // first create the slice level breakdowns final Map> sliceLevelBreakdowns = buildSliceLevelBreakdown(createWeightStartTime); - return buildQueryBreakdownMap(sliceLevelBreakdowns, createWeightStartTime); + return buildQueryBreakdownMap(sliceLevelBreakdowns, createWeightTime, createWeightStartTime); + } + + /** + * @param createWeightTime time for creating weight + * @return default breakdown map for concurrent query which includes the create weight time and all other timing type stats in the + * breakdown has default value of 0. For concurrent search case, the max/min/avg stats for each timing type will also be 0 in this + * default breakdown map. 
+ */ + private Map buildDefaultQueryBreakdownMap(long createWeightTime) { + final Map concurrentQueryBreakdownMap = new HashMap<>(); + for (QueryTimingType timingType : QueryTimingType.values()) { + final String timingTypeKey = timingType.toString(); + final String timingTypeCountKey = timingTypeKey + TIMING_TYPE_COUNT_SUFFIX; + + if (timingType.equals(QueryTimingType.CREATE_WEIGHT)) { + concurrentQueryBreakdownMap.put(timingTypeKey, createWeightTime); + concurrentQueryBreakdownMap.put(timingTypeCountKey, 1L); + continue; + } + final String maxBreakdownTypeTime = MAX_PREFIX + timingTypeKey; + final String minBreakdownTypeTime = MIN_PREFIX + timingTypeKey; + final String avgBreakdownTypeTime = AVG_PREFIX + timingTypeKey; + final String maxBreakdownTypeCount = MAX_PREFIX + timingTypeCountKey; + final String minBreakdownTypeCount = MIN_PREFIX + timingTypeCountKey; + final String avgBreakdownTypeCount = AVG_PREFIX + timingTypeCountKey; + // add time related stats + concurrentQueryBreakdownMap.put(timingTypeKey, 0L); + concurrentQueryBreakdownMap.put(maxBreakdownTypeTime, 0L); + concurrentQueryBreakdownMap.put(minBreakdownTypeTime, 0L); + concurrentQueryBreakdownMap.put(avgBreakdownTypeTime, 0L); + // add count related stats + concurrentQueryBreakdownMap.put(timingTypeCountKey, 0L); + concurrentQueryBreakdownMap.put(maxBreakdownTypeCount, 0L); + concurrentQueryBreakdownMap.put(minBreakdownTypeCount, 0L); + concurrentQueryBreakdownMap.put(avgBreakdownTypeCount, 0L); + } + return concurrentQueryBreakdownMap; } /** @@ -163,12 +206,19 @@ private Map> buildSliceLevelBreakdown(long createWe * time/count stats for each breakdown type. Total time per breakdown type at query level is computed by subtracting the max of slice * end time with min of slice start time for that type. Count for each breakdown type at query level is sum of count of that type * across slices. 
Other than these, there are max/min/avg stats across slices for each breakdown type - * @param sliceLevelBreakdowns breakdown map for all the slices - * @param createWeightStartTime start time of create weight + * + * @param sliceLevelBreakdowns breakdown map for all the slices + * @param createWeightTime time for create weight + * @param createWeightStartTime start time for create weight * @return breakdown map for entire query */ - private Map buildQueryBreakdownMap(Map> sliceLevelBreakdowns, long createWeightStartTime) { + public Map buildQueryBreakdownMap( + Map> sliceLevelBreakdowns, + long createWeightTime, + long createWeightStartTime) + { final Map queryBreakdownMap = new HashMap<>(); + long queryEndTime = Long.MIN_VALUE; for (QueryTimingType queryTimingType : QueryTimingType.values()) { final String timingTypeKey = queryTimingType.toString(); final String timingTypeCountKey = timingTypeKey + TIMING_TYPE_COUNT_SUFFIX; @@ -178,9 +228,9 @@ private Map buildQueryBreakdownMap(Map buildQueryBreakdownMap(Map buildQueryBreakdownMap(Map testMap = new HashMap<>(); - testMap.put(CREATE_WEIGHT, 370343L); - testMap.put(CREATE_WEIGHT + COUNT_SUFFIX, 1L); - testMap.put(CREATE_WEIGHT + START_TIME_SUFFIX, 1598347679188617L); - testMap.put(CREATE_WEIGHT + MAX_END_TIME_SUFFIX, 1598347679558960L); - testMap.put(CREATE_WEIGHT + MIN_START_TIME_SUFFIX, 1598347679188617L); - testMap.put(BUILD_SCORER, 0L); - testMap.put(BUILD_SCORER + COUNT_SUFFIX, 6L); - testMap.put(BUILD_SCORER + START_TIME_SUFFIX, 0L); - testMap.put(BUILD_SCORER + MAX_END_TIME_SUFFIX, 1598347688270123L); - testMap.put(BUILD_SCORER + MIN_START_TIME_SUFFIX, 1598347686448701L); - testMap.put(NEXT_DOC, 0L); - testMap.put(NEXT_DOC + COUNT_SUFFIX, 5L); - testMap.put(NEXT_DOC + START_TIME_SUFFIX, 0L); - testMap.put(NEXT_DOC + MAX_END_TIME_SUFFIX, 1598347688298671L); - testMap.put(NEXT_DOC + MIN_START_TIME_SUFFIX, 1598347688110288L); - testMap.put(ADVANCE, 0L); - testMap.put(ADVANCE + COUNT_SUFFIX, 3L); - 
testMap.put(ADVANCE + START_TIME_SUFFIX, 0L); - testMap.put(ADVANCE + MAX_END_TIME_SUFFIX, 1598347688280739L); - testMap.put(ADVANCE + MIN_START_TIME_SUFFIX, 1598347687991984L); - testMap.put(MATCH, 0L); - testMap.put(MATCH + COUNT_SUFFIX, 0L); - testMap.put(MATCH + START_TIME_SUFFIX, 0L); - testMap.put(MATCH + MAX_END_TIME_SUFFIX, 0L); - testMap.put(MATCH + MIN_START_TIME_SUFFIX, 0L); - testMap.put(SCORE, 0L); - testMap.put(SCORE + COUNT_SUFFIX, 5L); - testMap.put(SCORE + START_TIME_SUFFIX, 0L); - testMap.put(SCORE + MAX_END_TIME_SUFFIX, 1598347688282743L); - testMap.put(SCORE + MIN_START_TIME_SUFFIX, 1598347688018500L); - testMap.put(SHALLOW_ADVANCE, 0L); - testMap.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); - testMap.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); - testMap.put(SHALLOW_ADVANCE + MAX_END_TIME_SUFFIX, 0L); - testMap.put(SHALLOW_ADVANCE + MIN_START_TIME_SUFFIX, 0L); - testMap.put(COMPUTE_MAX_SCORE, 0L); - testMap.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); - testMap.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); - testMap.put(COMPUTE_MAX_SCORE + MAX_END_TIME_SUFFIX, 0L); - testMap.put(COMPUTE_MAX_SCORE + MIN_START_TIME_SUFFIX, 0L); - testMap.put(SET_MIN_COMPETITIVE_SCORE, 0L); - testMap.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); - testMap.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); - testMap.put(SET_MIN_COMPETITIVE_SCORE + MAX_END_TIME_SUFFIX, 0L); - testMap.put(SET_MIN_COMPETITIVE_SCORE + MIN_START_TIME_SUFFIX, 0L); - ConcurrentQueryProfileBreakdown profileBreakdown = new ConcurrentQueryProfileBreakdown(); - Map> sliceLevelBreakdown = new HashMap<>(); - sliceLevelBreakdown.put("testCollector", testMap); - Map breakdownMap = profileBreakdown.buildFinalBreakdownMap(sliceLevelBreakdown); - assertEquals(66, breakdownMap.size()); - assertEquals( - "{max_match=0, set_min_competitive_score_count=0, match_count=0, avg_score_count=5, shallow_advance_count=0, next_doc=188383, min_build_scorer=0, score_count=5, compute_max_score_count=0, 
advance=288755, min_advance=0, min_set_min_competitive_score=0, score=264243, avg_set_min_competitive_score_count=0, min_match_count=0, avg_score=0, max_next_doc_count=5, avg_shallow_advance=0, max_compute_max_score_count=0, max_shallow_advance_count=0, set_min_competitive_score=0, min_build_scorer_count=6, next_doc_count=5, avg_next_doc=0, min_match=0, compute_max_score=0, max_build_scorer=0, min_set_min_competitive_score_count=0, avg_match_count=0, avg_advance=0, build_scorer_count=6, avg_build_scorer_count=6, min_next_doc_count=5, avg_match=0, max_score_count=5, min_shallow_advance_count=0, avg_compute_max_score=0, max_advance=0, avg_shallow_advance_count=0, avg_set_min_competitive_score=0, avg_compute_max_score_count=0, avg_build_scorer=0, max_set_min_competitive_score_count=0, advance_count=3, max_build_scorer_count=6, shallow_advance=0, max_match_count=0, min_compute_max_score=0, create_weight_count=1, build_scorer=1821422, max_compute_max_score=0, max_set_min_competitive_score=0, min_shallow_advance=0, match=0, min_next_doc=0, avg_advance_count=3, max_shallow_advance=0, max_advance_count=3, min_score=0, max_next_doc=0, create_weight=370343, avg_next_doc_count=5, max_score=0, min_compute_max_score_count=0, min_score_count=5, min_advance_count=3}", - breakdownMap.toString() - ); +// Map testMap = new HashMap<>(); +// testMap.put(CREATE_WEIGHT, 370343L); +// testMap.put(CREATE_WEIGHT + COUNT_SUFFIX, 1L); +// testMap.put(CREATE_WEIGHT + START_TIME_SUFFIX, 1598347679188617L); +// testMap.put(CREATE_WEIGHT + MAX_END_TIME_SUFFIX, 1598347679558960L); +// testMap.put(CREATE_WEIGHT + MIN_START_TIME_SUFFIX, 1598347679188617L); +// testMap.put(BUILD_SCORER, 0L); +// testMap.put(BUILD_SCORER + COUNT_SUFFIX, 6L); +// testMap.put(BUILD_SCORER + START_TIME_SUFFIX, 0L); +// testMap.put(BUILD_SCORER + MAX_END_TIME_SUFFIX, 1598347688270123L); +// testMap.put(BUILD_SCORER + MIN_START_TIME_SUFFIX, 1598347686448701L); +// testMap.put(NEXT_DOC, 0L); +// testMap.put(NEXT_DOC + 
COUNT_SUFFIX, 5L); +// testMap.put(NEXT_DOC + START_TIME_SUFFIX, 0L); +// testMap.put(NEXT_DOC + MAX_END_TIME_SUFFIX, 1598347688298671L); +// testMap.put(NEXT_DOC + MIN_START_TIME_SUFFIX, 1598347688110288L); +// testMap.put(ADVANCE, 0L); +// testMap.put(ADVANCE + COUNT_SUFFIX, 3L); +// testMap.put(ADVANCE + START_TIME_SUFFIX, 0L); +// testMap.put(ADVANCE + MAX_END_TIME_SUFFIX, 1598347688280739L); +// testMap.put(ADVANCE + MIN_START_TIME_SUFFIX, 1598347687991984L); +// testMap.put(MATCH, 0L); +// testMap.put(MATCH + COUNT_SUFFIX, 0L); +// testMap.put(MATCH + START_TIME_SUFFIX, 0L); +// testMap.put(MATCH + MAX_END_TIME_SUFFIX, 0L); +// testMap.put(MATCH + MIN_START_TIME_SUFFIX, 0L); +// testMap.put(SCORE, 0L); +// testMap.put(SCORE + COUNT_SUFFIX, 5L); +// testMap.put(SCORE + START_TIME_SUFFIX, 0L); +// testMap.put(SCORE + MAX_END_TIME_SUFFIX, 1598347688282743L); +// testMap.put(SCORE + MIN_START_TIME_SUFFIX, 1598347688018500L); +// testMap.put(SHALLOW_ADVANCE, 0L); +// testMap.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); +// testMap.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); +// testMap.put(SHALLOW_ADVANCE + MAX_END_TIME_SUFFIX, 0L); +// testMap.put(SHALLOW_ADVANCE + MIN_START_TIME_SUFFIX, 0L); +// testMap.put(COMPUTE_MAX_SCORE, 0L); +// testMap.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); +// testMap.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); +// testMap.put(COMPUTE_MAX_SCORE + MAX_END_TIME_SUFFIX, 0L); +// testMap.put(COMPUTE_MAX_SCORE + MIN_START_TIME_SUFFIX, 0L); +// testMap.put(SET_MIN_COMPETITIVE_SCORE, 0L); +// testMap.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); +// testMap.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); +// testMap.put(SET_MIN_COMPETITIVE_SCORE + MAX_END_TIME_SUFFIX, 0L); +// testMap.put(SET_MIN_COMPETITIVE_SCORE + MIN_START_TIME_SUFFIX, 0L); +// ConcurrentQueryProfileBreakdown profileBreakdown = new ConcurrentQueryProfileBreakdown(); +// Map> sliceLevelBreakdown = new HashMap<>(); +// 
sliceLevelBreakdown.put("testCollector", testMap); +// Map breakdownMap = profileBreakdown.buildQueryBreakdownMap(sliceLevelBreakdown, 1598347679188617L); +// assertEquals(66, breakdownMap.size()); +// assertEquals( +// "{max_match=0, set_min_competitive_score_count=0, match_count=0, avg_score_count=5, shallow_advance_count=0, next_doc=188383, min_build_scorer=0, score_count=5, compute_max_score_count=0, advance=288755, min_advance=0, min_set_min_competitive_score=0, score=264243, avg_set_min_competitive_score_count=0, min_match_count=0, avg_score=0, max_next_doc_count=5, avg_shallow_advance=0, max_compute_max_score_count=0, max_shallow_advance_count=0, set_min_competitive_score=0, min_build_scorer_count=6, next_doc_count=5, avg_next_doc=0, min_match=0, compute_max_score=0, max_build_scorer=0, min_set_min_competitive_score_count=0, avg_match_count=0, avg_advance=0, build_scorer_count=6, avg_build_scorer_count=6, min_next_doc_count=5, avg_match=0, max_score_count=5, min_shallow_advance_count=0, avg_compute_max_score=0, max_advance=0, avg_shallow_advance_count=0, avg_set_min_competitive_score=0, avg_compute_max_score_count=0, avg_build_scorer=0, max_set_min_competitive_score_count=0, advance_count=3, max_build_scorer_count=6, shallow_advance=0, max_match_count=0, min_compute_max_score=0, create_weight_count=1, build_scorer=1821422, max_compute_max_score=0, max_set_min_competitive_score=0, min_shallow_advance=0, match=0, min_next_doc=0, avg_advance_count=3, max_shallow_advance=0, max_advance_count=3, min_score=0, max_next_doc=0, create_weight=370343, avg_next_doc_count=5, max_score=0, min_compute_max_score_count=0, min_score_count=5, min_advance_count=3}", +// breakdownMap.toString() +// ); } } From 34b048e406bc38474a15daadf57efb31da46cf7a Mon Sep 17 00:00:00 2001 From: Sorabh Hamirwasia Date: Thu, 31 Aug 2023 16:28:49 -0700 Subject: [PATCH 7/9] Add UTs for ConcurrentQueryProfileBreakdown Signed-off-by: Sorabh Hamirwasia --- .../ConcurrentQueryProfileBreakdown.java | 
20 +- .../ConcurrentQueryProfileBreakdownTests.java | 370 ++++++++++++++---- 2 files changed, 310 insertions(+), 80 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index 49309df32d3c0..1c91c92784b72 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -28,18 +28,18 @@ * @opensearch.internal */ public final class ConcurrentQueryProfileBreakdown extends ContextualProfileBreakdown { - private static final String SLICE_END_TIME_SUFFIX = "_slice_end_time"; - private static final String SLICE_START_TIME_SUFFIX = "_slice_start_time"; - private static final String MAX_PREFIX = "max_"; - private static final String MIN_PREFIX = "min_"; - private static final String AVG_PREFIX = "avg_"; + static final String SLICE_END_TIME_SUFFIX = "_slice_end_time"; + static final String SLICE_START_TIME_SUFFIX = "_slice_start_time"; + static final String MAX_PREFIX = "max_"; + static final String MIN_PREFIX = "min_"; + static final String AVG_PREFIX = "avg_"; private long queryNodeTime = Long.MIN_VALUE; private long maxSliceNodeTime = Long.MIN_VALUE; private long minSliceNodeTime = Long.MAX_VALUE; private long avgSliceNodeTime = 0L; - // keep track of all breakdown timings per segment - private final Map> contexts = new ConcurrentHashMap<>(); + // keep track of all breakdown timings per segment. 
package-private for testing + final Map> contexts = new ConcurrentHashMap<>(); // represents slice to leaves mapping as for each slice a unique collector instance is created private final Map> sliceCollectorToLeaves = new HashMap<>(); @@ -127,7 +127,7 @@ private Map buildDefaultQueryBreakdownMap(long createWeightTime) { * @param createWeightStartTime start time when createWeight is called * @return map of collector (or slice) to breakdown map */ - private Map> buildSliceLevelBreakdown(long createWeightStartTime) { + Map> buildSliceLevelBreakdown(long createWeightStartTime) { final Map> sliceLevelBreakdowns = new HashMap<>(); long totalSliceNodeTime = 0; for (Map.Entry> slice : sliceCollectorToLeaves.entrySet()) { @@ -215,8 +215,8 @@ private Map> buildSliceLevelBreakdown(long createWe public Map buildQueryBreakdownMap( Map> sliceLevelBreakdowns, long createWeightTime, - long createWeightStartTime) - { + long createWeightStartTime + ) { final Map queryBreakdownMap = new HashMap<>(); long queryEndTime = Long.MIN_VALUE; for (QueryTimingType queryTimingType : QueryTimingType.values()) { diff --git a/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java b/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java index 61a15e5cf289c..fed3074a461b1 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java @@ -13,81 +13,311 @@ package org.opensearch.search.profile.query; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import 
org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.Collector; +import org.apache.lucene.store.Directory; +import org.opensearch.search.profile.AbstractProfileBreakdown; +import org.opensearch.search.profile.Timer; import org.opensearch.test.OpenSearchTestCase; +import org.junit.Before; import java.util.HashMap; import java.util.Map; +import static org.opensearch.search.profile.AbstractProfileBreakdown.TIMING_TYPE_COUNT_SUFFIX; +import static org.opensearch.search.profile.AbstractProfileBreakdown.TIMING_TYPE_START_TIME_SUFFIX; +import static org.opensearch.search.profile.query.ConcurrentQueryProfileBreakdown.MIN_PREFIX; +import static org.opensearch.search.profile.query.ConcurrentQueryProfileBreakdown.SLICE_END_TIME_SUFFIX; +import static org.opensearch.search.profile.query.ConcurrentQueryProfileBreakdown.SLICE_START_TIME_SUFFIX; +import static org.mockito.Mockito.mock; + public class ConcurrentQueryProfileBreakdownTests extends OpenSearchTestCase { - private static final String CREATE_WEIGHT = QueryTimingType.CREATE_WEIGHT.toString(); - private static final String BUILD_SCORER = QueryTimingType.BUILD_SCORER.toString(); - private static final String NEXT_DOC = QueryTimingType.NEXT_DOC.toString(); - private static final String ADVANCE = QueryTimingType.ADVANCE.toString(); - private static final String MATCH = QueryTimingType.MATCH.toString(); - private static final String SCORE = QueryTimingType.SCORE.toString(); - private static final String SHALLOW_ADVANCE = QueryTimingType.SHALLOW_ADVANCE.toString(); - private static final String COMPUTE_MAX_SCORE = QueryTimingType.COMPUTE_MAX_SCORE.toString(); - private static final String SET_MIN_COMPETITIVE_SCORE = QueryTimingType.SET_MIN_COMPETITIVE_SCORE.toString(); - private static final String COUNT_SUFFIX = "_count"; - private static final String START_TIME_SUFFIX = "_start_time"; - private static final String 
MAX_END_TIME_SUFFIX = "_max_end_time"; - private static final String MIN_START_TIME_SUFFIX = "_min_start_time"; - - public void testBuildFinalBreakdownMap() { -// Map testMap = new HashMap<>(); -// testMap.put(CREATE_WEIGHT, 370343L); -// testMap.put(CREATE_WEIGHT + COUNT_SUFFIX, 1L); -// testMap.put(CREATE_WEIGHT + START_TIME_SUFFIX, 1598347679188617L); -// testMap.put(CREATE_WEIGHT + MAX_END_TIME_SUFFIX, 1598347679558960L); -// testMap.put(CREATE_WEIGHT + MIN_START_TIME_SUFFIX, 1598347679188617L); -// testMap.put(BUILD_SCORER, 0L); -// testMap.put(BUILD_SCORER + COUNT_SUFFIX, 6L); -// testMap.put(BUILD_SCORER + START_TIME_SUFFIX, 0L); -// testMap.put(BUILD_SCORER + MAX_END_TIME_SUFFIX, 1598347688270123L); -// testMap.put(BUILD_SCORER + MIN_START_TIME_SUFFIX, 1598347686448701L); -// testMap.put(NEXT_DOC, 0L); -// testMap.put(NEXT_DOC + COUNT_SUFFIX, 5L); -// testMap.put(NEXT_DOC + START_TIME_SUFFIX, 0L); -// testMap.put(NEXT_DOC + MAX_END_TIME_SUFFIX, 1598347688298671L); -// testMap.put(NEXT_DOC + MIN_START_TIME_SUFFIX, 1598347688110288L); -// testMap.put(ADVANCE, 0L); -// testMap.put(ADVANCE + COUNT_SUFFIX, 3L); -// testMap.put(ADVANCE + START_TIME_SUFFIX, 0L); -// testMap.put(ADVANCE + MAX_END_TIME_SUFFIX, 1598347688280739L); -// testMap.put(ADVANCE + MIN_START_TIME_SUFFIX, 1598347687991984L); -// testMap.put(MATCH, 0L); -// testMap.put(MATCH + COUNT_SUFFIX, 0L); -// testMap.put(MATCH + START_TIME_SUFFIX, 0L); -// testMap.put(MATCH + MAX_END_TIME_SUFFIX, 0L); -// testMap.put(MATCH + MIN_START_TIME_SUFFIX, 0L); -// testMap.put(SCORE, 0L); -// testMap.put(SCORE + COUNT_SUFFIX, 5L); -// testMap.put(SCORE + START_TIME_SUFFIX, 0L); -// testMap.put(SCORE + MAX_END_TIME_SUFFIX, 1598347688282743L); -// testMap.put(SCORE + MIN_START_TIME_SUFFIX, 1598347688018500L); -// testMap.put(SHALLOW_ADVANCE, 0L); -// testMap.put(SHALLOW_ADVANCE + COUNT_SUFFIX, 0L); -// testMap.put(SHALLOW_ADVANCE + START_TIME_SUFFIX, 0L); -// testMap.put(SHALLOW_ADVANCE + MAX_END_TIME_SUFFIX, 0L); 
-// testMap.put(SHALLOW_ADVANCE + MIN_START_TIME_SUFFIX, 0L); -// testMap.put(COMPUTE_MAX_SCORE, 0L); -// testMap.put(COMPUTE_MAX_SCORE + COUNT_SUFFIX, 0L); -// testMap.put(COMPUTE_MAX_SCORE + START_TIME_SUFFIX, 0L); -// testMap.put(COMPUTE_MAX_SCORE + MAX_END_TIME_SUFFIX, 0L); -// testMap.put(COMPUTE_MAX_SCORE + MIN_START_TIME_SUFFIX, 0L); -// testMap.put(SET_MIN_COMPETITIVE_SCORE, 0L); -// testMap.put(SET_MIN_COMPETITIVE_SCORE + COUNT_SUFFIX, 0L); -// testMap.put(SET_MIN_COMPETITIVE_SCORE + START_TIME_SUFFIX, 0L); -// testMap.put(SET_MIN_COMPETITIVE_SCORE + MAX_END_TIME_SUFFIX, 0L); -// testMap.put(SET_MIN_COMPETITIVE_SCORE + MIN_START_TIME_SUFFIX, 0L); -// ConcurrentQueryProfileBreakdown profileBreakdown = new ConcurrentQueryProfileBreakdown(); -// Map> sliceLevelBreakdown = new HashMap<>(); -// sliceLevelBreakdown.put("testCollector", testMap); -// Map breakdownMap = profileBreakdown.buildQueryBreakdownMap(sliceLevelBreakdown, 1598347679188617L); -// assertEquals(66, breakdownMap.size()); -// assertEquals( -// "{max_match=0, set_min_competitive_score_count=0, match_count=0, avg_score_count=5, shallow_advance_count=0, next_doc=188383, min_build_scorer=0, score_count=5, compute_max_score_count=0, advance=288755, min_advance=0, min_set_min_competitive_score=0, score=264243, avg_set_min_competitive_score_count=0, min_match_count=0, avg_score=0, max_next_doc_count=5, avg_shallow_advance=0, max_compute_max_score_count=0, max_shallow_advance_count=0, set_min_competitive_score=0, min_build_scorer_count=6, next_doc_count=5, avg_next_doc=0, min_match=0, compute_max_score=0, max_build_scorer=0, min_set_min_competitive_score_count=0, avg_match_count=0, avg_advance=0, build_scorer_count=6, avg_build_scorer_count=6, min_next_doc_count=5, avg_match=0, max_score_count=5, min_shallow_advance_count=0, avg_compute_max_score=0, max_advance=0, avg_shallow_advance_count=0, avg_set_min_competitive_score=0, avg_compute_max_score_count=0, avg_build_scorer=0, 
max_set_min_competitive_score_count=0, advance_count=3, max_build_scorer_count=6, shallow_advance=0, max_match_count=0, min_compute_max_score=0, create_weight_count=1, build_scorer=1821422, max_compute_max_score=0, max_set_min_competitive_score=0, min_shallow_advance=0, match=0, min_next_doc=0, avg_advance_count=3, max_shallow_advance=0, max_advance_count=3, min_score=0, max_next_doc=0, create_weight=370343, avg_next_doc_count=5, max_score=0, min_compute_max_score_count=0, min_score_count=5, min_advance_count=3}", -// breakdownMap.toString() -// ); + private ConcurrentQueryProfileBreakdown testQueryProfileBreakdown; + private Timer createWeightTimer; + + @Before + public void setup() { + testQueryProfileBreakdown = new ConcurrentQueryProfileBreakdown(); + createWeightTimer = testQueryProfileBreakdown.getTimer(QueryTimingType.CREATE_WEIGHT); + try { + createWeightTimer.start(); + Thread.sleep(10); + } catch (InterruptedException ex) { + // ignore + } finally { + createWeightTimer.stop(); + } + } + + public void testBreakdownMapWithNoLeafContext() throws Exception { + final Map queryBreakDownMap = testQueryProfileBreakdown.toBreakdownMap(); + assertFalse(queryBreakDownMap == null || queryBreakDownMap.isEmpty()); + assertEquals(66, queryBreakDownMap.size()); + for (QueryTimingType queryTimingType : QueryTimingType.values()) { + String timingTypeKey = queryTimingType.toString(); + String timingTypeCountKey = queryTimingType + TIMING_TYPE_COUNT_SUFFIX; + + if (queryTimingType.equals(QueryTimingType.CREATE_WEIGHT)) { + final long createWeightTime = queryBreakDownMap.get(timingTypeKey); + assertTrue(createWeightTime > 0); + assertEquals(1, (long) queryBreakDownMap.get(timingTypeCountKey)); + // verify there is no min/max/avg for weight type stats + assertFalse( + queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeKey) + || queryBreakDownMap.containsKey(MIN_PREFIX + timingTypeKey) + || 
queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeKey) + || queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeCountKey) + || queryBreakDownMap.containsKey(MIN_PREFIX + timingTypeCountKey) + || queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeCountKey) + ); + // verify total/min/max/avg node time is same as weight time + assertEquals(createWeightTime, testQueryProfileBreakdown.toNodeTime()); + assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); + assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + continue; + } + assertEquals(0, (long) queryBreakDownMap.get(timingTypeKey)); + assertEquals(0, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeKey)); + assertEquals(0, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeKey)); + assertEquals(0, (long) queryBreakDownMap.get(MIN_PREFIX + timingTypeKey)); + assertEquals(0, (long) queryBreakDownMap.get(timingTypeCountKey)); + assertEquals(0, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeCountKey)); + assertEquals(0, (long) queryBreakDownMap.get(MIN_PREFIX + timingTypeCountKey)); + assertEquals(0, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeCountKey)); + } + } + + public void testBuildSliceLevelBreakdownWithSingleSlice() throws Exception { + final DirectoryReader directoryReader = getDirectoryReader(1); + final Directory directory = directoryReader.directory(); + final LeafReaderContext sliceLeaf = directoryReader.leaves().get(0); + final Collector sliceCollector = mock(Collector.class); + final long createWeightEarliestStartTime = createWeightTimer.getEarliestTimerStartTime(); + final Map leafProfileBreakdownMap = 
getLeafBreakdownMap(createWeightEarliestStartTime + 10, 10, 1); + final AbstractProfileBreakdown leafProfileBreakdown = new TestQueryProfileBreakdown( + QueryTimingType.class, + leafProfileBreakdownMap + ); + testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector, sliceLeaf); + testQueryProfileBreakdown.contexts.put(sliceLeaf, leafProfileBreakdown); + final Map> sliceBreakdownMap = testQueryProfileBreakdown.buildSliceLevelBreakdown( + createWeightEarliestStartTime + ); + assertFalse(sliceBreakdownMap == null || sliceBreakdownMap.isEmpty()); + assertEquals(1, sliceBreakdownMap.size()); + assertTrue(sliceBreakdownMap.containsKey(sliceCollector)); + + final Map sliceBreakdown = sliceBreakdownMap.entrySet().iterator().next().getValue(); + for (QueryTimingType timingType : QueryTimingType.values()) { + String timingTypeKey = timingType.toString(); + String timingTypeCountKey = timingTypeKey + TIMING_TYPE_COUNT_SUFFIX; + + if (timingType.equals(QueryTimingType.CREATE_WEIGHT)) { + // there should be no entry for create weight at slice level breakdown map + assertNull(sliceBreakdown.get(timingTypeKey)); + assertNull(sliceBreakdown.get(timingTypeCountKey)); + continue; + } + + // for other timing type we will have all the value and will be same as leaf breakdown as there is single slice and single leaf + assertEquals(leafProfileBreakdownMap.get(timingTypeKey), sliceBreakdown.get(timingTypeKey)); + assertEquals(leafProfileBreakdownMap.get(timingTypeCountKey), sliceBreakdown.get(timingTypeCountKey)); + assertEquals( + leafProfileBreakdownMap.get(timingTypeKey + TIMING_TYPE_START_TIME_SUFFIX), + sliceBreakdown.get(timingTypeKey + SLICE_START_TIME_SUFFIX) + ); + assertEquals( + leafProfileBreakdownMap.get(timingTypeKey + TIMING_TYPE_START_TIME_SUFFIX) + leafProfileBreakdownMap.get(timingTypeKey), + (long) sliceBreakdown.get(timingTypeKey + SLICE_END_TIME_SUFFIX) + ); + } + assertEquals(20, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); + assertEquals(20, 
(long) testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(20, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + directoryReader.close(); + directory.close(); + } + + public void testBuildSliceLevelBreakdownWithMultipleSlices() throws Exception { + final DirectoryReader directoryReader = getDirectoryReader(2); + final Directory directory = directoryReader.directory(); + final Collector sliceCollector_1 = mock(Collector.class); + final Collector sliceCollector_2 = mock(Collector.class); + final long createWeightEarliestStartTime = createWeightTimer.getEarliestTimerStartTime(); + final Map leafProfileBreakdownMap_1 = getLeafBreakdownMap(createWeightEarliestStartTime + 10, 10, 1); + final Map leafProfileBreakdownMap_2 = getLeafBreakdownMap(createWeightEarliestStartTime + 40, 10, 1); + final AbstractProfileBreakdown leafProfileBreakdown_1 = new TestQueryProfileBreakdown( + QueryTimingType.class, + leafProfileBreakdownMap_1 + ); + final AbstractProfileBreakdown leafProfileBreakdown_2 = new TestQueryProfileBreakdown( + QueryTimingType.class, + leafProfileBreakdownMap_2 + ); + testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_1, directoryReader.leaves().get(0)); + testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_2, directoryReader.leaves().get(1)); + testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(0), leafProfileBreakdown_1); + testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(1), leafProfileBreakdown_2); + final Map> sliceBreakdownMap = testQueryProfileBreakdown.buildSliceLevelBreakdown( + createWeightEarliestStartTime + ); + assertFalse(sliceBreakdownMap == null || sliceBreakdownMap.isEmpty()); + assertEquals(2, sliceBreakdownMap.size()); + + for (Map.Entry> sliceBreakdowns : sliceBreakdownMap.entrySet()) { + Map sliceBreakdown = sliceBreakdowns.getValue(); + Map leafProfileBreakdownMap; + if (sliceBreakdowns.getKey().equals(sliceCollector_1)) { + leafProfileBreakdownMap = 
leafProfileBreakdownMap_1; + } else { + leafProfileBreakdownMap = leafProfileBreakdownMap_2; + } + for (QueryTimingType timingType : QueryTimingType.values()) { + String timingTypeKey = timingType.toString(); + String timingTypeCountKey = timingTypeKey + TIMING_TYPE_COUNT_SUFFIX; + + if (timingType.equals(QueryTimingType.CREATE_WEIGHT)) { + // there should be no entry for create weight at slice level breakdown map + assertNull(sliceBreakdown.get(timingTypeKey)); + assertNull(sliceBreakdown.get(timingTypeCountKey)); + continue; + } + + // for other timing type we will have all the value and will be same as leaf breakdown as there is single slice and single + // leaf + assertEquals(leafProfileBreakdownMap.get(timingTypeKey), sliceBreakdown.get(timingTypeKey)); + assertEquals(leafProfileBreakdownMap.get(timingTypeCountKey), sliceBreakdown.get(timingTypeCountKey)); + assertEquals( + leafProfileBreakdownMap.get(timingTypeKey + TIMING_TYPE_START_TIME_SUFFIX), + sliceBreakdown.get(timingTypeKey + SLICE_START_TIME_SUFFIX) + ); + assertEquals( + leafProfileBreakdownMap.get(timingTypeKey + TIMING_TYPE_START_TIME_SUFFIX) + leafProfileBreakdownMap.get(timingTypeKey), + (long) sliceBreakdown.get(timingTypeKey + SLICE_END_TIME_SUFFIX) + ); + } + } + + assertEquals(50, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); + assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(35, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + directoryReader.close(); + directory.close(); + } + + public void testBreakDownMapWithMultipleSlices() throws Exception { + final DirectoryReader directoryReader = getDirectoryReader(2); + final Directory directory = directoryReader.directory(); + final Collector sliceCollector_1 = mock(Collector.class); + final Collector sliceCollector_2 = mock(Collector.class); + final long createWeightEarliestStartTime = createWeightTimer.getEarliestTimerStartTime(); + final Map leafProfileBreakdownMap_1 = 
getLeafBreakdownMap(createWeightEarliestStartTime + 10, 10, 1); + final Map leafProfileBreakdownMap_2 = getLeafBreakdownMap(createWeightEarliestStartTime + 40, 20, 1); + final AbstractProfileBreakdown leafProfileBreakdown_1 = new TestQueryProfileBreakdown( + QueryTimingType.class, + leafProfileBreakdownMap_1 + ); + final AbstractProfileBreakdown leafProfileBreakdown_2 = new TestQueryProfileBreakdown( + QueryTimingType.class, + leafProfileBreakdownMap_2 + ); + testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_1, directoryReader.leaves().get(0)); + testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_2, directoryReader.leaves().get(1)); + testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(0), leafProfileBreakdown_1); + testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(1), leafProfileBreakdown_2); + + Map queryBreakDownMap = testQueryProfileBreakdown.toBreakdownMap(); + assertFalse(queryBreakDownMap == null || queryBreakDownMap.isEmpty()); + assertEquals(66, queryBreakDownMap.size()); + + for (QueryTimingType queryTimingType : QueryTimingType.values()) { + String timingTypeKey = queryTimingType.toString(); + String timingTypeCountKey = queryTimingType + TIMING_TYPE_COUNT_SUFFIX; + + if (queryTimingType.equals(QueryTimingType.CREATE_WEIGHT)) { + final long createWeightTime = queryBreakDownMap.get(timingTypeKey); + assertEquals(createWeightTimer.getApproximateTiming(), createWeightTime); + assertEquals(1, (long) queryBreakDownMap.get(timingTypeCountKey)); + // verify there is no min/max/avg for weight type stats + assertFalse( + queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeKey) + || queryBreakDownMap.containsKey(MIN_PREFIX + timingTypeKey) + || queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeKey) + || queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeCountKey) + || 
queryBreakDownMap.containsKey(MIN_PREFIX + timingTypeCountKey) + || queryBreakDownMap.containsKey(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeCountKey) + ); + continue; + } + assertEquals(50, (long) queryBreakDownMap.get(timingTypeKey)); + assertEquals(20, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeKey)); + assertEquals(15, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeKey)); + assertEquals(10, (long) queryBreakDownMap.get(MIN_PREFIX + timingTypeKey)); + assertEquals(2, (long) queryBreakDownMap.get(timingTypeCountKey)); + assertEquals(1, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.MAX_PREFIX + timingTypeCountKey)); + assertEquals(1, (long) queryBreakDownMap.get(MIN_PREFIX + timingTypeCountKey)); + assertEquals(1, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeCountKey)); + } + + assertEquals(60, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); + assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(40, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + directoryReader.close(); + directory.close(); + } + + private Map getLeafBreakdownMap(long startTime, long timeTaken, long count) { + Map leafBreakDownMap = new HashMap<>(); + for (QueryTimingType timingType : QueryTimingType.values()) { + if (timingType.equals(QueryTimingType.CREATE_WEIGHT)) { + // don't add anything + continue; + } + String timingTypeKey = timingType.toString(); + leafBreakDownMap.put(timingTypeKey, timeTaken); + leafBreakDownMap.put(timingTypeKey + TIMING_TYPE_COUNT_SUFFIX, count); + leafBreakDownMap.put(timingTypeKey + TIMING_TYPE_START_TIME_SUFFIX, startTime); + } + return leafBreakDownMap; + } + + private DirectoryReader getDirectoryReader(int numLeaves) throws Exception { + final Directory directory = newDirectory(); + IndexWriter iw = new IndexWriter(directory, new IndexWriterConfig(new 
StandardAnalyzer()).setMergePolicy(NoMergePolicy.INSTANCE)); + + for (int i = 0; i < numLeaves; ++i) { + Document document = new Document(); + document.add(new StringField("field1", "value" + i, Field.Store.NO)); + document.add(new StringField("field2", "value" + i, Field.Store.NO)); + iw.addDocument(document); + iw.commit(); + } + iw.deleteDocuments(new Term("field1", "value3")); + iw.close(); + return DirectoryReader.open(directory); + } + + private static class TestQueryProfileBreakdown extends AbstractProfileBreakdown { + private Map breakdownMap; + + public TestQueryProfileBreakdown(Class clazz, Map breakdownMap) { + super(clazz); + this.breakdownMap = breakdownMap; + } + + @Override + public Map toBreakdownMap() { + return breakdownMap; + } } } From ae1af6246d526381b2b3618ba47f9ccb8602ee71 Mon Sep 17 00:00:00 2001 From: Ticheng Lin Date: Thu, 31 Aug 2023 17:59:00 -0700 Subject: [PATCH 8/9] Add concurrent search stats test into the QueryProfilerIT Signed-off-by: Ticheng Lin --- .../search/profile/query/QueryProfilerIT.java | 55 ++++++++++++++++++- .../profile/query/QueryProfilerTests.java | 8 +-- .../search/query/QueryProfilePhaseTests.java | 26 ++++----- 3 files changed, 67 insertions(+), 22 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java index 39966888fdff4..24f2b1bbdabfc 100644 --- a/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/search/profile/query/QueryProfilerIT.java @@ -66,17 +66,25 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.nullValue; public class QueryProfilerIT extends ParameterizedOpenSearchIntegTestCase { - public QueryProfilerIT(Settings settings) { 
+ private final boolean concurrentSearchEnabled; + private static final String MAX_PREFIX = "max_"; + private static final String MIN_PREFIX = "min_"; + private static final String AVG_PREFIX = "avg_"; + private static final String TIMING_TYPE_COUNT_SUFFIX = "_count"; + + public QueryProfilerIT(Settings settings, boolean concurrentSearchEnabled) { super(settings); + this.concurrentSearchEnabled = concurrentSearchEnabled; } @ParametersFactory public static Collection parameters() { return Arrays.asList( - new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build() }, - new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build() } + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), false).build(), false }, + new Object[] { Settings.builder().put(CLUSTER_CONCURRENT_SEGMENT_SEARCH_SETTING.getKey(), true).build(), true } ); } @@ -250,6 +258,7 @@ public void testSimpleMatch() throws Exception { assertEquals(result.getLuceneDescription(), "field1:one"); assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); + assertQueryProfileResult(result); } CollectorResult result = searchProfiles.getCollectorResult(); @@ -292,6 +301,7 @@ public void testBool() throws Exception { assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); assertEquals(result.getProfiledChildren().size(), 2); + assertQueryProfileResult(result); // Check the children List children = result.getProfiledChildren(); @@ -303,12 +313,14 @@ public void testBool() throws Exception { assertThat(childProfile.getTime(), greaterThan(0L)); assertNotNull(childProfile.getTimeBreakdown()); assertEquals(childProfile.getProfiledChildren().size(), 0); + assertQueryProfileResult(childProfile); childProfile = children.get(1); assertEquals(childProfile.getQueryName(), "TermQuery"); assertEquals(childProfile.getLuceneDescription(), 
"field1:two"); assertThat(childProfile.getTime(), greaterThan(0L)); assertNotNull(childProfile.getTimeBreakdown()); + assertQueryProfileResult(childProfile); } CollectorResult result = searchProfiles.getCollectorResult(); @@ -351,6 +363,7 @@ public void testEmptyBool() throws Exception { assertNotNull(result.getLuceneDescription()); assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); + assertQueryProfileResult(result); } CollectorResult result = searchProfiles.getCollectorResult(); @@ -396,6 +409,7 @@ public void testCollapsingBool() throws Exception { assertNotNull(result.getLuceneDescription()); assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); + assertQueryProfileResult(result); } CollectorResult result = searchProfiles.getCollectorResult(); @@ -436,6 +450,7 @@ public void testBoosting() throws Exception { assertNotNull(result.getLuceneDescription()); assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); + assertQueryProfileResult(result); } CollectorResult result = searchProfiles.getCollectorResult(); @@ -476,6 +491,7 @@ public void testDisMaxRange() throws Exception { assertNotNull(result.getLuceneDescription()); assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); + assertQueryProfileResult(result); } CollectorResult result = searchProfiles.getCollectorResult(); @@ -515,6 +531,7 @@ public void testRange() throws Exception { assertNotNull(result.getLuceneDescription()); assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); + assertQueryProfileResult(result); } CollectorResult result = searchProfiles.getCollectorResult(); @@ -568,6 +585,7 @@ public void testPhrase() throws Exception { assertNotNull(result.getLuceneDescription()); assertThat(result.getTime(), greaterThan(0L)); assertNotNull(result.getTimeBreakdown()); + assertQueryProfileResult(result); } CollectorResult result = 
searchProfiles.getCollectorResult(); @@ -600,4 +618,35 @@ public void testNoProfile() throws Exception { assertThat("Profile response element should be an empty map", resp.getProfileResults().size(), equalTo(0)); } + private void assertQueryProfileResult(ProfileResult result) { + Map breakdown = result.getTimeBreakdown(); + Long maxSliceTime = result.getMaxSliceTime(); + Long minSliceTime = result.getMinSliceTime(); + Long avgSliceTime = result.getAvgSliceTime(); + if (concurrentSearchEnabled) { + assertNotNull(maxSliceTime); + assertNotNull(minSliceTime); + assertNotNull(avgSliceTime); + assertThat(breakdown.size(), equalTo(66)); + for (QueryTimingType queryTimingType : QueryTimingType.values()) { + if (queryTimingType != QueryTimingType.CREATE_WEIGHT) { + String maxTimingType = MAX_PREFIX + queryTimingType; + String minTimingType = MIN_PREFIX + queryTimingType; + String avgTimingType = AVG_PREFIX + queryTimingType; + assertNotNull(breakdown.get(maxTimingType)); + assertNotNull(breakdown.get(minTimingType)); + assertNotNull(breakdown.get(avgTimingType)); + assertNotNull(breakdown.get(maxTimingType + TIMING_TYPE_COUNT_SUFFIX)); + assertNotNull(breakdown.get(minTimingType + TIMING_TYPE_COUNT_SUFFIX)); + assertNotNull(breakdown.get(avgTimingType + TIMING_TYPE_COUNT_SUFFIX)); + } + } + } else { + assertThat(maxSliceTime, is(nullValue())); + assertThat(minSliceTime, is(nullValue())); + assertThat(avgSliceTime, is(nullValue())); + assertThat(breakdown.size(), equalTo(27)); + } + } + } diff --git a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java index e3e2f8b851b94..64a440b85eb10 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/QueryProfilerTests.java @@ -88,7 +88,6 @@ import static org.mockito.Mockito.when; public class QueryProfilerTests extends 
OpenSearchTestCase { - private final int concurrency; private Directory dir; private IndexReader reader; private ContextIndexSearcher searcher; @@ -105,7 +104,6 @@ public static Collection concurrency() { public QueryProfilerTests(int concurrency) { this.executor = (concurrency > 0) ? Executors.newFixedThreadPool(concurrency) : null; - this.concurrency = concurrency; } @Before @@ -185,7 +183,7 @@ public void testBasic() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(profileResult.getMaxSliceTime(), is(not(nullValue()))); assertThat(profileResult.getMinSliceTime(), is(not(nullValue()))); assertThat(profileResult.getAvgSliceTime(), is(not(nullValue()))); @@ -252,7 +250,7 @@ public void testNoScoring() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(profileResult.getMaxSliceTime(), is(not(nullValue()))); assertThat(profileResult.getMinSliceTime(), is(not(nullValue()))); assertThat(profileResult.getAvgSliceTime(), is(not(nullValue()))); @@ -333,7 +331,7 @@ public void testApproximations() throws IOException { assertThat(breakdown.get(QueryTimingType.SCORE + TIMING_TYPE_COUNT_SUFFIX), equalTo(0L)); assertThat(breakdown.get(QueryTimingType.MATCH + TIMING_TYPE_COUNT_SUFFIX), greaterThan(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(profileResult.getMaxSliceTime(), is(not(nullValue()))); assertThat(profileResult.getMinSliceTime(), is(not(nullValue()))); assertThat(profileResult.getAvgSliceTime(), is(not(nullValue()))); diff --git a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java 
b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java index 42f77ebf2d998..fc77c1b356124 100644 --- a/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java +++ b/server/src/test/java/org/opensearch/search/query/QueryProfilePhaseTests.java @@ -97,7 +97,6 @@ import static org.mockito.Mockito.when; public class QueryProfilePhaseTests extends IndexShardTestCase { - private final int concurrency; private IndexShard indexShard; private final ExecutorService executor; private final QueryPhaseSearcher queryPhaseSearcher; @@ -113,7 +112,6 @@ public static Collection concurrency() { public QueryProfilePhaseTests(int concurrency, QueryPhaseSearcher queryPhaseSearcher) { this.executor = (concurrency > 0) ? Executors.newFixedThreadPool(concurrency) : null; this.queryPhaseSearcher = queryPhaseSearcher; - this.concurrency = concurrency; } @Override @@ -340,7 +338,7 @@ public void testMinScoreDisablesCountOptimization() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThanOrEqualTo(100L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(1L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThanOrEqualTo(100L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThanOrEqualTo(100L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThanOrEqualTo(100L)); @@ -487,7 +485,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); 
assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -565,7 +563,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -611,7 +609,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -743,7 +741,7 @@ public void testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(0).getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -758,7 +756,7 @@ public void 
testTerminateAfterEarlyTermination() throws Exception { assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("create_weight_count"), equalTo(1L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getProfiledChildren().get(1).getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1104,7 +1102,7 @@ public void testDisableTopScoreCollection() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThan(0L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1191,7 +1189,7 @@ public void testMinScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), equalTo(10L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1276,7 +1274,7 @@ public void testMaxScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), 
greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThanOrEqualTo(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1319,7 +1317,7 @@ public void testMaxScore() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1397,7 +1395,7 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); @@ -1432,7 +1430,7 @@ public void testCollapseQuerySearchResults() throws Exception { assertThat(query.getTimeBreakdown().keySet(), not(empty())); assertThat(query.getTimeBreakdown().get("score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("score_count"), greaterThanOrEqualTo(6L)); - if (concurrency > 0) { + if (executor != null) { assertThat(query.getTimeBreakdown().get("max_score"), greaterThan(0L)); assertThat(query.getTimeBreakdown().get("min_score"), greaterThan(0L)); 
assertThat(query.getTimeBreakdown().get("avg_score"), greaterThan(0L)); From 6644e02297eecf3ff82efad03591ea35da709857 Mon Sep 17 00:00:00 2001 From: Sorabh Hamirwasia Date: Fri, 1 Sep 2023 12:52:33 -0700 Subject: [PATCH 9/9] Address review comments Signed-off-by: Sorabh Hamirwasia --- .../profile/ContextualProfileBreakdown.java | 2 +- .../ConcurrentQueryProfileBreakdown.java | 34 +++++++++++-------- .../query/ConcurrentQueryProfileTree.java | 4 +-- .../ConcurrentQueryProfileBreakdownTests.java | 34 +++++++++---------- 4 files changed, 40 insertions(+), 34 deletions(-) diff --git a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java index 94ac8683fe2b9..3fe621321c8ad 100644 --- a/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/ContextualProfileBreakdown.java @@ -34,5 +34,5 @@ public ContextualProfileBreakdown(Class clazz) { public void associateCollectorToLeaves(Collector collector, LeafReaderContext leaf) {} - public void associateCollectorToLeaves(Map> collectorToLeaves) {} + public void associateCollectorsToLeaves(Map> collectorToLeaves) {} } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java index 1c91c92784b72..f37d53737d52b 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdown.java @@ -39,10 +39,10 @@ public final class ConcurrentQueryProfileBreakdown extends ContextualProfileBrea private long avgSliceNodeTime = 0L; // keep track of all breakdown timings per segment. 
package-private for testing - final Map> contexts = new ConcurrentHashMap<>(); + private final Map> contexts = new ConcurrentHashMap<>(); // represents slice to leaves mapping as for each slice a unique collector instance is created - private final Map> sliceCollectorToLeaves = new HashMap<>(); + private final Map> sliceCollectorsToLeaves = new ConcurrentHashMap<>(); /** Sole constructor. */ public ConcurrentQueryProfileBreakdown() { @@ -69,7 +69,7 @@ public Map toBreakdownMap() { ); final long createWeightTime = topLevelBreakdownMapWithWeightTime.get(QueryTimingType.CREATE_WEIGHT.toString()); - if (sliceCollectorToLeaves.isEmpty() || contexts.isEmpty()) { + if (sliceCollectorsToLeaves.isEmpty() || contexts.isEmpty()) { // If there are no leaf contexts, then return the default concurrent query level breakdown, which will include the // create_weight time/count queryNodeTime = createWeightTime; @@ -122,7 +122,7 @@ private Map buildDefaultQueryBreakdownMap(long createWeightTime) { } /** - * Computes the slice level breakdownMap. It uses sliceCollectorToLeaves to figure out all the leaves or segments part of a slice. + * Computes the slice level breakdownMap. It uses sliceCollectorsToLeaves to figure out all the leaves or segments part of a slice. * Then use the breakdown timing stats for each of these leaves to calculate the breakdown stats at slice level. 
* @param createWeightStartTime start time when createWeight is called * @return map of collector (or slice) to breakdown map @@ -130,7 +130,7 @@ private Map buildDefaultQueryBreakdownMap(long createWeightTime) { Map> buildSliceLevelBreakdown(long createWeightStartTime) { final Map> sliceLevelBreakdowns = new HashMap<>(); long totalSliceNodeTime = 0; - for (Map.Entry> slice : sliceCollectorToLeaves.entrySet()) { + for (Map.Entry> slice : sliceCollectorsToLeaves.entrySet()) { final Collector sliceCollector = slice.getKey(); // initialize each slice level breakdown final Map currentSliceBreakdown = sliceLevelBreakdowns.computeIfAbsent(sliceCollector, k -> new HashMap<>()); @@ -197,7 +197,7 @@ Map> buildSliceLevelBreakdown(long createWeightStar // total time at query level totalSliceNodeTime += currentSliceNodeTime; } - avgSliceNodeTime = totalSliceNodeTime / sliceCollectorToLeaves.size(); + avgSliceNodeTime = totalSliceNodeTime / sliceCollectorsToLeaves.size(); return sliceLevelBreakdowns; } @@ -298,27 +298,33 @@ public long toNodeTime() { @Override public void associateCollectorToLeaves(Collector collector, LeafReaderContext leaf) { - sliceCollectorToLeaves.computeIfAbsent(collector, k -> new ArrayList<>()).add(leaf); + // Each slice (or collector) is executed by single thread. 
So the list for a key will always be updated by a single thread only + sliceCollectorsToLeaves.computeIfAbsent(collector, k -> new ArrayList<>()).add(leaf); } @Override - public void associateCollectorToLeaves(Map> collectorToLeaves) { - this.sliceCollectorToLeaves.putAll(collectorToLeaves); + public void associateCollectorsToLeaves(Map> collectorsToLeaves) { + sliceCollectorsToLeaves.putAll(collectorsToLeaves); } - Map> getSliceCollectorToLeaves() { - return Collections.unmodifiableMap(sliceCollectorToLeaves); + Map> getSliceCollectorsToLeaves() { + return Collections.unmodifiableMap(sliceCollectorsToLeaves); } - Long getMaxSliceNodeTime() { + // used by tests + Map> getContexts() { + return contexts; + } + + long getMaxSliceNodeTime() { return maxSliceNodeTime; } - Long getMinSliceNodeTime() { + long getMinSliceNodeTime() { return minSliceNodeTime; } - Long getAvgSliceNodeTime() { + long getAvgSliceNodeTime() { return avgSliceNodeTime; } } diff --git a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java index 8b3d605685a47..4e54178c3b4fb 100644 --- a/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java +++ b/server/src/main/java/org/opensearch/search/profile/query/ConcurrentQueryProfileTree.java @@ -65,7 +65,7 @@ public List getTree() { final ContextualProfileBreakdown parentBreakdown = breakdowns.get(root); assert parentBreakdown instanceof ConcurrentQueryProfileBreakdown; final Map> parentCollectorToLeaves = ((ConcurrentQueryProfileBreakdown) parentBreakdown) - .getSliceCollectorToLeaves(); + .getSliceCollectorsToLeaves(); // update all the children with the parent collectorToLeaves association updateCollectorToLeavesForChildBreakdowns(root, parentCollectorToLeaves); } @@ -83,7 +83,7 @@ private void updateCollectorToLeavesForChildBreakdowns(Integer parentToken, Map< if (children != null) { for 
(Integer currentChild : children) { final ContextualProfileBreakdown currentChildBreakdown = breakdowns.get(currentChild); - currentChildBreakdown.associateCollectorToLeaves(collectorToLeaves); + currentChildBreakdown.associateCollectorsToLeaves(collectorToLeaves); updateCollectorToLeavesForChildBreakdowns(currentChild, collectorToLeaves); } } diff --git a/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java b/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java index fed3074a461b1..065c90b262e11 100644 --- a/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java +++ b/server/src/test/java/org/opensearch/search/profile/query/ConcurrentQueryProfileBreakdownTests.java @@ -81,9 +81,9 @@ public void testBreakdownMapWithNoLeafContext() throws Exception { ); // verify total/min/max/avg node time is same as weight time assertEquals(createWeightTime, testQueryProfileBreakdown.toNodeTime()); - assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); - assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getMinSliceNodeTime()); - assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + assertEquals(createWeightTime, testQueryProfileBreakdown.getMaxSliceNodeTime()); + assertEquals(createWeightTime, testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(createWeightTime, testQueryProfileBreakdown.getAvgSliceNodeTime()); continue; } assertEquals(0, (long) queryBreakDownMap.get(timingTypeKey)); @@ -109,7 +109,7 @@ public void testBuildSliceLevelBreakdownWithSingleSlice() throws Exception { leafProfileBreakdownMap ); testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector, sliceLeaf); - testQueryProfileBreakdown.contexts.put(sliceLeaf, leafProfileBreakdown); + testQueryProfileBreakdown.getContexts().put(sliceLeaf, leafProfileBreakdown); final Map> 
sliceBreakdownMap = testQueryProfileBreakdown.buildSliceLevelBreakdown( createWeightEarliestStartTime ); @@ -141,9 +141,9 @@ public void testBuildSliceLevelBreakdownWithSingleSlice() throws Exception { (long) sliceBreakdown.get(timingTypeKey + SLICE_END_TIME_SUFFIX) ); } - assertEquals(20, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); - assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime()); - assertEquals(20, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + assertEquals(20, testQueryProfileBreakdown.getMaxSliceNodeTime()); + assertEquals(20, testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(20, testQueryProfileBreakdown.getAvgSliceNodeTime()); directoryReader.close(); directory.close(); } @@ -166,8 +166,8 @@ public void testBuildSliceLevelBreakdownWithMultipleSlices() throws Exception { ); testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_1, directoryReader.leaves().get(0)); testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_2, directoryReader.leaves().get(1)); - testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(0), leafProfileBreakdown_1); - testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(1), leafProfileBreakdown_2); + testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(0), leafProfileBreakdown_1); + testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(1), leafProfileBreakdown_2); final Map> sliceBreakdownMap = testQueryProfileBreakdown.buildSliceLevelBreakdown( createWeightEarliestStartTime ); @@ -208,9 +208,9 @@ public void testBuildSliceLevelBreakdownWithMultipleSlices() throws Exception { } } - assertEquals(50, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); - assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime()); - assertEquals(35, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + assertEquals(50, testQueryProfileBreakdown.getMaxSliceNodeTime()); + 
assertEquals(20, testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(35, testQueryProfileBreakdown.getAvgSliceNodeTime()); directoryReader.close(); directory.close(); } @@ -233,8 +233,8 @@ public void testBreakDownMapWithMultipleSlices() throws Exception { ); testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_1, directoryReader.leaves().get(0)); testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_2, directoryReader.leaves().get(1)); - testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(0), leafProfileBreakdown_1); - testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(1), leafProfileBreakdown_2); + testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(0), leafProfileBreakdown_1); + testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(1), leafProfileBreakdown_2); Map queryBreakDownMap = testQueryProfileBreakdown.toBreakdownMap(); assertFalse(queryBreakDownMap == null || queryBreakDownMap.isEmpty()); @@ -269,9 +269,9 @@ public void testBreakDownMapWithMultipleSlices() throws Exception { assertEquals(1, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeCountKey)); } - assertEquals(60, (long) testQueryProfileBreakdown.getMaxSliceNodeTime()); - assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime()); - assertEquals(40, (long) testQueryProfileBreakdown.getAvgSliceNodeTime()); + assertEquals(60, testQueryProfileBreakdown.getMaxSliceNodeTime()); + assertEquals(20, testQueryProfileBreakdown.getMinSliceNodeTime()); + assertEquals(40, testQueryProfileBreakdown.getAvgSliceNodeTime()); directoryReader.close(); directory.close(); }