Commit

explain
seawinde committed Dec 27, 2023
1 parent 75a4548 commit db56257
Showing 10 changed files with 181 additions and 32 deletions.
@@ -41,6 +41,7 @@
import org.apache.doris.nereids.processor.post.PlanPostProcessors;
import org.apache.doris.nereids.processor.pre.PlanPreprocessors;
import org.apache.doris.nereids.properties.PhysicalProperties;
import org.apache.doris.nereids.rules.exploration.mv.MaterializationContext;
import org.apache.doris.nereids.trees.expressions.Expression;
import org.apache.doris.nereids.trees.expressions.NamedExpression;
import org.apache.doris.nereids.trees.expressions.literal.Literal;
@@ -399,7 +400,9 @@ public String getExplainString(ExplainOptions explainOptions) {
case MEMO_PLAN:
plan = cascadesContext.getMemo().toString()
+ "\n\n========== OPTIMIZED PLAN ==========\n"
+ optimizedPlan.treeString();
+ optimizedPlan.treeString()
+ "\n\n========== MATERIALIZATIONS ==========\n"
+ MaterializationContext.toString(cascadesContext.getMaterializationContexts());
break;
case ALL_PLAN:
plan = "========== PARSED PLAN ==========\n"
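Note on the hunk above: the MEMO_PLAN branch of getExplainString now appends a "========== MATERIALIZATIONS ==========" section produced by the new static MaterializationContext.toString(...) over cascadesContext.getMaterializationContexts(). The body of that helper belongs to a changed file that is not expanded in this excerpt, so the sketch below is only an assumption of its shape: the class and field names here (MaterializationSummary, mvName, success, failReasons) are illustrative stand-ins, not Doris APIs, and they only show how per-context summaries could be joined into the explain output.

import java.util.List;
import java.util.stream.Collectors;

// Illustrative stand-in only: the real MaterializationContext lives in
// org.apache.doris.nereids.rules.exploration.mv and its toString helper is not shown above.
final class MaterializationSummary {
    private final String mvName;             // assumed: name of the async materialized view
    private final boolean success;           // assumed: whether rewrite by this MV succeeded
    private final List<String> failReasons;  // assumed: reasons recorded via recordFailReason

    MaterializationSummary(String mvName, boolean success, List<String> failReasons) {
        this.mvName = mvName;
        this.success = success;
        this.failReasons = failReasons;
    }

    @Override
    public String toString() {
        return "MaterializationContext[" + mvName + "] success=" + success
                + ", failReasons=" + failReasons;
    }

    // Assumed shape of the static helper used by the explain string: one summary per context.
    static String toString(List<MaterializationSummary> contexts) {
        return contexts.stream()
                .map(context -> context.toString())
                .collect(Collectors.joining("\n"));
    }
}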
@@ -21,6 +21,7 @@
import org.apache.doris.nereids.jobs.joinorder.hypergraph.edge.Edge;
import org.apache.doris.nereids.trees.plans.GroupPlan;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.util.Utils;

import com.google.common.collect.ImmutableList;

@@ -67,4 +68,9 @@ public List<HyperGraph> getGraphs() {
return graphs;
}

@Override
public String toString() {
return Utils.toSqlString("StructInfoNode[" + this.getName() + "]",
"plan", this.plan.treeString());
}
}
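The StructInfoNode change above adds a toString built on Utils.toSqlString, so hyper-graph nodes dump the plan they wrap when struct infos are logged or printed. The snippet below is only a self-contained approximation of that kind of "title plus key/value pairs" formatter; the method name toSqlStringLike and the exact layout are assumptions, not the actual org.apache.doris.nereids.util.Utils implementation.

// Rough approximation of a Utils.toSqlString-style formatter: a title followed by
// alternating key/value arguments; the real helper's output layout may differ.
final class SqlStringDemo {
    static String toSqlStringLike(String title, Object... kvPairs) {
        StringBuilder builder = new StringBuilder(title).append(" ( ");
        for (int i = 0; i + 1 < kvPairs.length; i += 2) {
            if (i > 0) {
                builder.append(", ");
            }
            builder.append(kvPairs[i]).append("=").append(kvPairs[i + 1]);
        }
        return builder.append(" )").toString();
    }

    public static void main(String[] args) {
        // Prints something like: StructInfoNode[0] ( plan=LogicalOlapScan ( ... ) )
        System.out.println(toSqlStringLike("StructInfoNode[0]", "plan", "LogicalOlapScan ( ... )"));
    }
}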
@@ -22,6 +22,7 @@
import org.apache.doris.nereids.jobs.joinorder.hypergraph.edge.JoinEdge;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.AbstractNode;
import org.apache.doris.nereids.jobs.joinorder.hypergraph.node.StructInfoNode;
import org.apache.doris.nereids.memo.GroupExpression;
import org.apache.doris.nereids.rules.exploration.mv.StructInfo.PlanSplitContext;
import org.apache.doris.nereids.rules.exploration.mv.mapping.ExpressionMapping;
import org.apache.doris.nereids.rules.exploration.mv.mapping.SlotMapping;
@@ -35,10 +36,13 @@
import org.apache.doris.nereids.trees.expressions.functions.agg.BitmapUnion;
import org.apache.doris.nereids.trees.expressions.functions.agg.CouldRollUp;
import org.apache.doris.nereids.trees.expressions.functions.agg.Count;
import org.apache.doris.nereids.trees.plans.AbstractPlan;
import org.apache.doris.nereids.trees.plans.ObjectId;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.trees.plans.logical.LogicalAggregate;
import org.apache.doris.nereids.trees.plans.logical.LogicalProject;
import org.apache.doris.nereids.util.ExpressionUtils;
import org.apache.doris.planner.PlanNodeId;

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
@@ -80,12 +84,17 @@ protected Plan rewriteQueryByView(MatchMode matchMode,
MaterializationContext materializationContext) {
// get view and query aggregate and top plan correspondingly
Pair<Plan, LogicalAggregate<Plan>> viewTopPlanAndAggPair = splitToTopPlanAndAggregate(viewStructInfo);
ObjectId queryObjectId = queryStructInfo.getOriginalPlan().getGroupExpression()
.map(GroupExpression::getId).orElseGet(() -> new ObjectId(-1));
if (viewTopPlanAndAggPair == null) {
materializationContext.recordFailReason(queryObjectId, "split view to top plan and agg fail");
logger.warn(currentClassName + " split to view to top plan and agg fail so return null");
return null;
}
Pair<Plan, LogicalAggregate<Plan>> queryTopPlanAndAggPair = splitToTopPlanAndAggregate(queryStructInfo);
if (queryTopPlanAndAggPair == null) {
materializationContext.recordFailReason(queryObjectId,
"split query to top plan and agg fail");
logger.warn(currentClassName + " split to query to top plan and agg fail so return null");
return null;
}
@@ -115,7 +124,8 @@ protected Plan rewriteQueryByView(MatchMode matchMode,
true);
if (rewrittenQueryGroupExpr.isEmpty()) {
// can not rewrite, bail out.
logger.debug(currentClassName + " can not rewrite expression when not need roll up");
materializationContext.recordFailReason(queryObjectId,
"can not rewrite expression when roll up is not needed");
return null;
}
return new LogicalProject<>(
@@ -130,13 +140,16 @@ protected Plan rewriteQueryByView(MatchMode matchMode,
viewExpr -> viewExpr.anyMatch(expr -> expr instanceof AggregateFunction
&& ((AggregateFunction) expr).isDistinct()))) {
// if mv aggregate function contains distinct, can not roll up, bail out.
logger.debug(currentClassName + " view contains distinct function so can not roll up");
materializationContext.recordFailReason(queryObjectId,
"view contains distinct function so can not roll up");
return null;
}
// split the query top plan expressions to group expressions and functions, if can not, bail out.
Pair<Set<? extends Expression>, Set<? extends Expression>> queryGroupAndFunctionPair
= topPlanSplitToGroupAndFunction(queryTopPlanAndAggPair);
if (queryGroupAndFunctionPair == null) {
materializationContext.recordFailReason(queryObjectId,
"query top plan split to group by and function fail");
logger.warn(currentClassName + " query top plan split to group by and function fail so return null");
return null;
}
@@ -175,7 +188,8 @@ protected Plan rewriteQueryByView(MatchMode matchMode,
queryToViewSlotMapping,
false);
if (rewrittenFunctionExpression == null) {
logger.debug(currentClassName + " roll up expression can not rewrite by view so return null");
materializationContext.recordFailReason(queryObjectId,
"roll up expression can not be rewritten by view");
return null;
}
finalAggregateExpressions.add((NamedExpression) rewrittenFunctionExpression);
@@ -185,8 +199,8 @@ protected Plan rewriteQueryByView(MatchMode matchMode,
ExpressionUtils.shuttleExpressionWithLineage(topExpression, queryTopPlan);
if (!mvExprToMvScanExprQueryBased.containsKey(queryGroupShuttledExpr)) {
// group expr can not rewrite by view
logger.debug(currentClassName
+ " view group expressions can not contains the query group by expression so return null");
materializationContext.recordFailReason(queryObjectId,
"view group expressions do not contain the query group by expression");
return null;
}
groupRewrittenExprMap.put(queryGroupShuttledExpr,
@@ -199,8 +213,8 @@ protected Plan rewriteQueryByView(MatchMode matchMode,
queryToViewSlotMapping,
true);
if (rewrittenGroupExpression == null) {
logger.debug(currentClassName
+ " query top expression can not be rewritten by view so return null");
materializationContext.recordFailReason(queryObjectId,
"query top group expression can not be rewritten by view");
return null;
}
finalAggregateExpressions.add((NamedExpression) rewrittenGroupExpression);
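In the aggregate rewrite rule above, each bail-out point now calls materializationContext.recordFailReason(queryObjectId, ...) in addition to (or instead of) plain logging, with queryObjectId taken from the query plan's GroupExpression and new ObjectId(-1) as the fallback for plans that are not yet in the memo. The MaterializationContext side of this commit is not expanded in this excerpt; the sketch below only guesses at the bookkeeping such a method might do (the class name FailReasonBook and the int-keyed map are assumptions), so that the MATERIALIZATIONS explain section can later print every reason per query group expression.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch of what recordFailReason(...) and setSuccess(...) might maintain
// inside MaterializationContext; the real implementation is part of this commit but not
// shown in the excerpt above. The int key stands in for ObjectId's wrapped id.
final class FailReasonBook {
    private final Map<Integer, List<String>> failReasonsByQueryId = new LinkedHashMap<>();
    private boolean success;

    void recordFailReason(int queryObjectId, String reason) {
        failReasonsByQueryId.computeIfAbsent(queryObjectId, key -> new ArrayList<>()).add(reason);
    }

    void setSuccess(boolean success) {
        this.success = success;
    }

    @Override
    public String toString() {
        return "success=" + success + ", failReasons=" + failReasonsByQueryId;
    }
}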
@@ -30,6 +30,7 @@
import org.apache.doris.mtmv.MTMVUtil;
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.jobs.executor.Rewriter;
import org.apache.doris.nereids.memo.GroupExpression;
import org.apache.doris.nereids.rules.exploration.ExplorationRuleFactory;
import org.apache.doris.nereids.rules.exploration.mv.Predicates.SplitPredicate;
import org.apache.doris.nereids.rules.exploration.mv.mapping.EquivalenceClassSetMapping;
@@ -44,12 +45,15 @@
import org.apache.doris.nereids.trees.expressions.SlotReference;
import org.apache.doris.nereids.trees.expressions.literal.BooleanLiteral;
import org.apache.doris.nereids.trees.expressions.literal.Literal;
import org.apache.doris.nereids.trees.plans.AbstractPlan;
import org.apache.doris.nereids.trees.plans.JoinType;
import org.apache.doris.nereids.trees.plans.ObjectId;
import org.apache.doris.nereids.trees.plans.Plan;
import org.apache.doris.nereids.trees.plans.algebra.CatalogRelation;
import org.apache.doris.nereids.trees.plans.logical.LogicalFilter;
import org.apache.doris.nereids.trees.plans.logical.LogicalOlapScan;
import org.apache.doris.nereids.util.ExpressionUtils;
import org.apache.doris.planner.PlanNodeId;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
@@ -94,13 +98,13 @@ protected List<Plan> rewrite(Plan queryPlan, CascadesContext cascadesContext) {
logger.debug(currentClassName + " queryStructInfo is not valid so return");
return rewriteResults;
}

ObjectId queryObjectId = queryPlan.getGroupExpression()
.map(GroupExpression::getId).orElseGet(() -> new ObjectId(-1));
for (MaterializationContext materializationContext : materializationContexts) {
// already rewrite, bail out
if (queryPlan.getGroupExpression().isPresent()
&& materializationContext.alreadyRewrite(
queryPlan.getGroupExpression().get().getOwnerGroup().getGroupId())) {
logger.debug(currentClassName + " this group is already rewritten so skip");
continue;
}
List<StructInfo> viewStructInfos = extractStructInfo(materializationContext.getMvPlan(),
@@ -112,34 +116,33 @@ protected List<Plan> rewrite(Plan queryPlan, CascadesContext cascadesContext) {
}
StructInfo viewStructInfo = viewStructInfos.get(0);
if (!checkPattern(viewStructInfo)) {
logger.debug(currentClassName + " viewStructInfo is not valid so return");
materializationContext.recordFailReason(queryObjectId, "viewStructInfo is not valid");
continue;
}
MatchMode matchMode = decideMatchMode(queryStructInfo.getRelations(), viewStructInfo.getRelations());
if (MatchMode.COMPLETE != matchMode) {
logger.debug(currentClassName + " match mode is not complete so return");
materializationContext.recordFailReason(queryObjectId, "match mode is not 'COMPLETE'");
continue;
}
List<RelationMapping> queryToViewTableMappings =
RelationMapping.generate(queryStructInfo.getRelations(), viewStructInfo.getRelations());
// if any relation in query and view can not map, bail out.
if (queryToViewTableMappings == null) {
logger.warn(currentClassName + " query to view table mapping null so return");
materializationContext.recordFailReason(queryObjectId, "query to view table mapping null");
return rewriteResults;
}
for (RelationMapping queryToViewTableMapping : queryToViewTableMappings) {
SlotMapping queryToViewSlotMapping = SlotMapping.generate(queryToViewTableMapping);
if (queryToViewSlotMapping == null) {
logger.warn(currentClassName + " query to view slot mapping null so continue");
materializationContext.recordFailReason(queryObjectId, "query to view slot mapping null");
continue;
}
LogicalCompatibilityContext compatibilityContext =
LogicalCompatibilityContext.from(queryToViewTableMapping, queryToViewSlotMapping,
queryStructInfo, viewStructInfo);
queryStructInfo, viewStructInfo, materializationContext);
ComparisonResult comparisonResult = StructInfo.isGraphLogicalEquals(queryStructInfo, viewStructInfo,
compatibilityContext);
if (comparisonResult.isInvalid()) {
logger.debug(currentClassName + " graph logical is not equals so continue");
continue;
}
// TODO: Use set of list? And consider view expr
@@ -152,7 +155,9 @@ protected List<Plan> rewrite(Plan queryPlan, CascadesContext cascadesContext) {
queryToViewSlotMapping);
// Can not compensate, bail out
if (compensatePredicates.isEmpty()) {
logger.debug(currentClassName + " predicate compensate fail so continue");
materializationContext.recordFailReason(queryObjectId, "predicate compensate fail, "
+ "query predicates = " + queryStructInfo.getPredicates() + ", "
+ "view predicates = " + viewStructInfo.getPredicates());
continue;
}
Plan rewrittenPlan;
@@ -168,7 +173,10 @@ protected List<Plan> rewrite(Plan queryPlan, CascadesContext cascadesContext) {
queryToViewSlotMapping,
true);
if (rewriteCompensatePredicates.isEmpty()) {
logger.debug(currentClassName + " compensate predicate rewrite by view fail so continue");
materializationContext.recordFailReason(queryObjectId, "rewrite compensate predicate fail, "
+ "compensatePredicates = " + compensatePredicates + ", "
+ "mvExprToMvScanExprMapping = "
+ materializationContext.getMvExprToMvScanExprMapping());
continue;
}
rewrittenPlan = new LogicalFilter<>(Sets.newHashSet(rewriteCompensatePredicates), mvScan);
@@ -181,15 +189,14 @@ protected List<Plan> rewrite(Plan queryPlan, CascadesContext cascadesContext) {
rewrittenPlan,
materializationContext);
if (rewrittenPlan == null) {
logger.debug(currentClassName + " rewrite query by view fail so continue");
materializationContext.recordFailReason(queryObjectId, "rewrite query by view fail");
continue;
}
if (!checkPartitionIsValid(queryStructInfo, materializationContext, cascadesContext)) {
logger.debug(currentClassName + " check partition validation fail so continue");
materializationContext.recordFailReason(queryObjectId, "check partition validation fail");
continue;
}
if (!checkOutput(queryPlan, rewrittenPlan)) {
logger.debug(currentClassName + " check output validation fail so continue");
if (!checkOutput(queryPlan, rewrittenPlan, materializationContext)) {
continue;
}
// run rbo job on mv rewritten plan
@@ -198,16 +205,23 @@ protected List<Plan> rewrite(Plan queryPlan, CascadesContext cascadesContext) {
cascadesContext.getCurrentJobContext().getRequiredProperties());
Rewriter.getWholeTreeRewriter(rewrittenPlanContext).execute();
rewrittenPlan = rewrittenPlanContext.getRewritePlan();
logger.debug(currentClassName + " rewrite by materialized view success");
materializationContext.setSuccess(true);
rewriteResults.add(rewrittenPlan);
}
}
return rewriteResults;
}

protected boolean checkOutput(Plan sourcePlan, Plan rewrittenPlan) {
protected boolean checkOutput(Plan sourcePlan, Plan rewrittenPlan, MaterializationContext materializationContext) {
if (sourcePlan.getGroupExpression().isPresent() && !rewrittenPlan.getLogicalProperties().equals(
sourcePlan.getGroupExpression().get().getOwnerGroup().getLogicalProperties())) {
ObjectId queryObjectId = sourcePlan.getGroupExpression()
.map(GroupExpression::getId).orElseGet(() -> new ObjectId(-1));
materializationContext.recordFailReason(queryObjectId,
"rewrittenPlan output logical properties are not the same as the target group's, "
+ "planOutput: " + rewrittenPlan.getLogicalProperties() + ", "
+ "groupOutput: " + sourcePlan.getGroupExpression().get()
.getOwnerGroup().getLogicalProperties());
logger.error("rewrittenPlan output logical properties is not same with target group");
return false;
}
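Finally, checkOutput now receives the MaterializationContext, so an output mismatch between the rewritten plan and the query's memo group is recorded as a fail reason instead of only being logged. As a rough, self-contained illustration (not the Doris implementation: LogicalProperties and ObjectId are replaced by plain lists and ints, and the recorder interface is a stand-in), the guard boils down to the comparison below.

import java.util.List;
import java.util.Objects;

// Simplified illustration of the checkOutput guard: the rewritten plan is only usable when
// its output "logical properties" match those of the memo group the query plan belongs to.
final class CheckOutputDemo {
    interface FailReasonRecorder {
        void recordFailReason(int queryObjectId, String reason);
    }

    static boolean checkOutput(List<String> groupOutput, List<String> rewrittenOutput,
            int queryObjectId, FailReasonRecorder recorder) {
        if (!Objects.equals(groupOutput, rewrittenOutput)) {
            recorder.recordFailReason(queryObjectId,
                    "rewrittenPlan output logical properties are not the same as the target group's, "
                            + "planOutput: " + rewrittenOutput + ", groupOutput: " + groupOutput);
            return false;
        }
        return true;
    }
}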