[chore](Nereids) remove enable_fallback_to_original_planner variable (a…
morrySnow authored Sep 12, 2024
1 parent de90051 commit e20030c
Showing 33 changed files with 16 additions and 110 deletions.
@@ -155,7 +155,6 @@ public void before() throws JobException {
ctx.setDatabase(currentDb);
}
TUniqueId queryId = generateQueryId(UUID.randomUUID().toString());
ctx.getSessionVariable().enableFallbackToOriginalPlanner = false;
stmtExecutor = new StmtExecutor(ctx, (String) null);
ctx.setQueryId(queryId);
if (StringUtils.isNotEmpty(sql)) {
@@ -172,7 +172,6 @@ private AutoCloseConnectContext buildConnectContext() {
connectContext.setSessionVariable(exportJob.getSessionVariables());
// The rollback to the old optimizer is prohibited
// Since originStmt is empty, reverting to the old optimizer when the new optimizer is enabled is meaningless.
connectContext.getSessionVariable().enableFallbackToOriginalPlanner = false;
connectContext.setEnv(Env.getCurrentEnv());
connectContext.setDatabase(exportJob.getTableName().getDb());
connectContext.setQualifiedUser(exportJob.getQualifiedUser());
@@ -54,7 +54,6 @@ public static ConnectContext createMTMVContext(MTMV mtmv) {
ctx.setCurrentUserIdentity(UserIdentity.ADMIN);
ctx.getState().reset();
ctx.setThreadLocalInfo();
ctx.getSessionVariable().enableFallbackToOriginalPlanner = false;
ctx.getSessionVariable().allowModifyMaterializedViewData = true;
Optional<String> workloadGroup = mtmv.getWorkloadGroup();
if (workloadGroup.isPresent()) {
@@ -110,7 +110,7 @@ public List<Rule> buildRules() {
return fileSink.withOutputExprs(output);
})
),
// TODO: bind hive taget table
// TODO: bind hive target table
RuleType.BINDING_INSERT_HIVE_TABLE.build(unboundHiveTableSink().thenApply(this::bindHiveTableSink)),
RuleType.BINDING_INSERT_ICEBERG_TABLE.build(
unboundIcebergTableSink().thenApply(this::bindIcebergTableSink)),
@@ -593,11 +593,6 @@ private Pair<Database, OlapTable> bind(CascadesContext cascadesContext, UnboundT
Pair<DatabaseIf<?>, TableIf> pair = RelationUtil.getDbAndTable(tableQualifier,
cascadesContext.getConnectContext().getEnv());
if (!(pair.second instanceof OlapTable)) {
try {
cascadesContext.getConnectContext().getSessionVariable().enableFallbackToOriginalPlannerOnce();
} catch (Exception e) {
throw new AnalysisException("fall back failed");
}
throw new AnalysisException("the target table of insert into is not an OLAP table");
}
return Pair.of(((Database) pair.first), (OlapTable) pair.second);
@@ -23,6 +23,7 @@
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.StatementContext;
import org.apache.doris.nereids.exceptions.AnalysisException;
import org.apache.doris.nereids.exceptions.MustFallbackException;
import org.apache.doris.nereids.hint.Hint;
import org.apache.doris.nereids.hint.LeadingHint;
import org.apache.doris.nereids.hint.OrderedHint;
@@ -111,12 +112,7 @@ private void setVar(SelectHintSetVar selectHint, StatementContext context) {
// enable_fallback_to_original_planner=true and revert it after executing.
// throw exception to fall back to original planner
if (!sessionVariable.isEnableNereidsPlanner()) {
try {
sessionVariable.enableFallbackToOriginalPlannerOnce();
} catch (Throwable t) {
throw new AnalysisException("failed to set fallback to original planner to true", t);
}
throw new AnalysisException("The nereids is disabled in this sql, fallback to original planner");
throw new MustFallbackException("The nereids is disabled in this sql, fallback to original planner");
}
}
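With the setVar handler above now throwing MustFallbackException directly, instead of flipping enable_fallback_to_original_planner and raising a generic AnalysisException, the fallback decision becomes purely exception-driven. Below is a minimal, self-contained sketch of that dispatch shape, mirroring the StmtExecutor check later in this commit; FallbackDispatch, Planner, and the two stub exception classes are illustrative stand-ins, not the actual Doris types.

// Illustrative sketch only. The stub NereidsException mimics the getException()
// accessor used by StmtExecutor in this commit; the planners are plain stand-ins.
final class FallbackDispatch {
    static class MustFallbackException extends RuntimeException {}

    static class NereidsException extends RuntimeException {
        private final Exception wrapped;
        NereidsException(Exception wrapped) { this.wrapped = wrapped; }
        Exception getException() { return wrapped; }
    }

    interface Planner { void plan(String sql); }

    static void execute(String sql, Planner nereids, Planner legacy) {
        try {
            nereids.plan(sql);
        } catch (NereidsException e) {
            if (e.getException() instanceof MustFallbackException) {
                legacy.plan(sql);   // the only remaining route to the legacy planner
            } else {
                throw e;            // genuine analysis error: surfaced, no silent retry
            }
        }
    }
}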

@@ -300,7 +300,7 @@ public void executeQuery(MysqlCommand mysqlCommand, String originStmt) throws Ex
}
}

if (stmts == null && !ctx.getSessionVariable().enableFallbackToOriginalPlanner) {
if (stmts == null) {
String errMsg;
Throwable exception = null;
if (nereidsParseException != null) {
@@ -333,12 +333,6 @@ public void executeQuery(MysqlCommand mysqlCommand, String originStmt) throws Ex
try {
stmts = parse(convertedStmt);
} catch (Throwable throwable) {
// if NereidsParser and oldParser both failed,
// prove is a new feature implemented only on the nereids,
// so an error message for the new nereids is thrown
if (nereidsSyntaxException != null) {
throwable = nereidsSyntaxException;
}
// Parse sql failed, audit it and return
handleQueryException(throwable, convertedStmt, null, null);
return;
@@ -1440,7 +1440,7 @@ public void setEnableLeftZigZag(boolean enableLeftZigZag) {
// This variable is used to avoid FE fallback to the original parser. When we execute SQL in regression tests
// for nereids, fallback will cause the Doris return the correct result although the syntax is unsupported
// in nereids for some mistaken modification. You should set it on the
@VariableMgr.VarAttr(name = ENABLE_FALLBACK_TO_ORIGINAL_PLANNER, needForward = true)
@VariableMgr.VarAttr(name = ENABLE_FALLBACK_TO_ORIGINAL_PLANNER, varType = VariableAnnotation.REMOVED)
public boolean enableFallbackToOriginalPlanner = false;

@VariableMgr.VarAttr(name = ENABLE_NEREIDS_TIMEOUT, needForward = true)
@@ -4056,15 +4056,6 @@ public void disableStrictConsistencyDmlOnce() throws DdlException {
new SetVar(SessionVariable.ENABLE_STRICT_CONSISTENCY_DML, new StringLiteral("false")));
}

public void enableFallbackToOriginalPlannerOnce() throws DdlException {
if (enableFallbackToOriginalPlanner) {
return;
}
setIsSingleSetVar(true);
VariableMgr.setVar(this,
new SetVar(SessionVariable.ENABLE_FALLBACK_TO_ORIGINAL_PLANNER, new StringLiteral("true")));
}

public void disableConstantFoldingByBEOnce() throws DdlException {
if (!enableFoldConstantByBe) {
return;
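The VarAttr hunk above keeps enableFallbackToOriginalPlanner declared but retags it with varType = VariableAnnotation.REMOVED, presumably so that existing scripts which still SET the variable keep working while no planner code reads it anymore. The following is a generic sketch of that "declared but removed" pattern under that assumption; the VarAttr and VariableAnnotation definitions below are simplified stand-ins, not the real Doris ones.

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

// Simplified stand-ins for the annotation machinery referenced in the hunk above.
class SessionVariableSketch {
    enum VariableAnnotation { NONE, DEPRECATED, REMOVED }

    @Retention(RetentionPolicy.RUNTIME)
    @interface VarAttr {
        String name();
        VariableAnnotation varType() default VariableAnnotation.NONE;
    }

    // Still declared, so SET enable_fallback_to_original_planner=... keeps resolving
    // to a field, but the read path is gone: nothing branches on this value anymore.
    @VarAttr(name = "enable_fallback_to_original_planner",
             varType = VariableAnnotation.REMOVED)
    public boolean enableFallbackToOriginalPlanner = false;
}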
@@ -618,8 +618,7 @@ public void execute(TUniqueId queryId) throws Exception {
throw new AnalysisException(e.getMessage());
}
if (e instanceof NereidsException
&& !(((NereidsException) e).getException() instanceof MustFallbackException)
&& !context.getSessionVariable().enableFallbackToOriginalPlanner) {
&& !(((NereidsException) e).getException() instanceof MustFallbackException)) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
context.getState().setError(e.getMessage());
return;
@@ -3525,7 +3524,7 @@ public HttpStreamParams generateHttpStreamPlan(TUniqueId queryId) throws Excepti
throw ((NereidsException) e).getException();
}
if (e instanceof NereidsException
&& !context.getSessionVariable().enableFallbackToOriginalPlanner) {
&& !(((NereidsException) e).getException() instanceof MustFallbackException)) {
LOG.warn("Analyze failed. {}", context.getQueryIdentifier(), e);
throw ((NereidsException) e).getException();
}
@@ -69,7 +69,6 @@ public class BulkLoadDataDescTest extends TestWithFeService {
@Override
protected void runBeforeAll() throws Exception {
connectContext.getState().setNereids(true);
connectContext.getSessionVariable().enableFallbackToOriginalPlanner = false;
connectContext.getSessionVariable().enableNereidsTimeout = false;
FeConstants.runningUnitTest = true;

@@ -74,7 +74,6 @@ public static void teardown() {
public void test() throws Exception {
ConnectContext ctx = UtFrameUtils.createDefaultCtx();
ctx.getSessionVariable().setEnableNereidsPlanner(false);
ctx.getSessionVariable().enableFallbackToOriginalPlanner = true;
ctx.getSessionVariable().setEnableFoldConstantByBe(false);
// create database db1
createDatabase(ctx, "create database db1;");
@@ -86,7 +86,6 @@ public class HiveDDLAndDMLPlanTest extends TestWithFeService {

@Override
protected void runBeforeAll() throws Exception {
connectContext.getSessionVariable().enableFallbackToOriginalPlanner = false;
Config.enable_query_hive_views = false;
// create test internal table
createDatabase(mockedDbName);
@@ -339,7 +339,6 @@ public void testQueryView() {
SessionVariable sv = connectContext.getSessionVariable();
Assertions.assertNotNull(sv);
sv.setEnableNereidsPlanner(true);
sv.enableFallbackToOriginalPlanner = false;

createDbAndTableForHmsCatalog((HMSExternalCatalog) env.getCatalogMgr().getCatalog(HMS_CATALOG));
queryViews(false);
@@ -20,7 +20,7 @@
import org.apache.doris.catalog.Env;
import org.apache.doris.nereids.NereidsPlanner;
import org.apache.doris.nereids.StatementContext;
import org.apache.doris.nereids.exceptions.AnalysisException;
import org.apache.doris.nereids.exceptions.MustFallbackException;
import org.apache.doris.nereids.parser.NereidsParser;
import org.apache.doris.nereids.properties.PhysicalProperties;
import org.apache.doris.qe.ConnectContext;
@@ -59,13 +59,11 @@ public void testFallbackToOriginalPlanner() throws Exception {
SessionVariable sv = ctx.getSessionVariable();
Assertions.assertNotNull(sv);
sv.setEnableNereidsPlanner(true);
sv.enableFallbackToOriginalPlanner = false;
Assertions.assertThrows(AnalysisException.class, () -> new NereidsPlanner(statementContext)
Assertions.assertThrows(MustFallbackException.class, () -> new NereidsPlanner(statementContext)
.planWithLock(new NereidsParser().parseSingle(sql), PhysicalProperties.ANY));

// manually recover sv
sv.setEnableNereidsPlanner(true);
sv.enableFallbackToOriginalPlanner = false;
StmtExecutor stmtExecutor = new StmtExecutor(ctx, sql);

new Expectations(stmtExecutor) {
@@ -77,6 +75,5 @@ public void testFallbackToOriginalPlanner() throws Exception {
stmtExecutor.execute();

Assertions.assertTrue(sv.isEnableNereidsPlanner());
Assertions.assertFalse(sv.enableFallbackToOriginalPlanner);
}
}
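The test above now asserts MustFallbackException straight from planWithLock instead of the old AnalysisException-plus-session-variable combination. Here is a compact JUnit 5 sketch of that assertion shape; planWithNereids and the stub exception are hypothetical, while the real test drives NereidsPlanner through a StatementContext as shown in the hunk.

import static org.junit.jupiter.api.Assertions.assertThrows;

import org.junit.jupiter.api.Test;

class MustFallbackSignalTest {
    // Stand-in for org.apache.doris.nereids.exceptions.MustFallbackException.
    static class MustFallbackException extends RuntimeException {}

    // Hypothetical planner entry point: statements the new optimizer cannot
    // handle raise MustFallbackException rather than silently re-planning.
    static void planWithNereids(String sql) {
        throw new MustFallbackException();
    }

    @Test
    void unsupportedStatementRaisesFallbackSignal() {
        assertThrows(MustFallbackException.class,
                () -> planWithNereids("CREATE TABLE t (k int)"));
    }
}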
@@ -18,7 +18,6 @@
suite("test_agg_state_nereids") {
sql "set enable_agg_state=true"
sql "set enable_nereids_planner=true;"
sql "set enable_fallback_to_original_planner=false;"

sql """ DROP TABLE IF EXISTS d_table; """
sql """
@@ -62,15 +61,12 @@ suite("test_agg_state_nereids") {
sql "insert into a_table select 1,max_by_state(1,3);"
sql "insert into a_table select 1,max_by_state(2,2);"
sql "insert into a_table select 1,max_by_state(3,1);"
sql 'set enable_fallback_to_original_planner=false'

qt_length1 """select k1,length(k2) from a_table order by k1;"""
qt_group1 """select k1,max_by_merge(k2) from a_table group by k1 order by k1;"""
qt_merge1 """select max_by_merge(k2) from a_table;"""

sql 'set enable_fallback_to_original_planner=true'
sql "insert into a_table select k1+1, max_by_state(k2,k1+10) from d_table;"
sql 'set enable_fallback_to_original_planner=false'

qt_length2 """select k1,length(k2) from a_table order by k1;"""
qt_group2 """select k1,max_by_merge(k2) from a_table group by k1 order by k1;"""
@@ -68,15 +68,13 @@ suite("test_decimalv3") {
// nereids
sql "set enable_nereids_planner = true;"

sql """ set enable_fallback_to_original_planner=false """
sql "set enable_decimal256 = true;"
qt_decimal256_const_0 "select 1.4E-45;"
qt_decimal256_const_1 "select 1.4E-80;"
sql "set enable_decimal256 = false;"
qt_decimal256_const_2 "select 1.4E-45;"
qt_decimal256_const_3 "select 1.4E-80;"

sql """ set enable_fallback_to_original_planner=true """
sql "set enable_decimal256 = true;"
qt_decimal256_const_4 "select 1.4E-45;"
qt_decimal256_const_5 "select 1.4E-80;"
4 changes: 2 additions & 2 deletions regression-test/suites/javaudf_p0/test_javaudtf_int.groovy
@@ -68,8 +68,8 @@ suite("test_javaudtf_int") {
qt_select1 """ SELECT user_id, varchar_col, e1 FROM ${tableName} lateral view udtf_int(user_id) temp as e1 order by user_id; """

test {
sql """ select /*+SET_VAR(enable_fallback_to_original_planner=true)*/ udtf_int(1); """
exception "UDTF function do not support this"
sql """ select udtf_int(1); """
exception "PROJECT can not contains TableGeneratingFunction expression: udtf_int(1)"
}
} finally {
try_sql("DROP FUNCTION IF EXISTS udtf_int(int);")
@@ -18,11 +18,6 @@
import org.codehaus.groovy.runtime.IOGroovyMethods

suite("test_json_predict_is_null", "p0") {
sql """ set experimental_enable_nereids_planner = false """

sql """ set experimental_enable_nereids_planner = true """
sql """ set enable_fallback_to_original_planner = true """

sql "DROP TABLE IF EXISTS j_pred"

sql """
3 changes: 0 additions & 3 deletions regression-test/suites/jsonb_p0/test_jsonb_cast.groovy
@@ -23,9 +23,6 @@ suite("test_jsonb_cast", "p0") {
def testTable = "tbl_test_array_text_cast_jsonb"
def dataFile = "test_jsonb_cast.csv"

sql """ set experimental_enable_nereids_planner = true """
sql """ set enable_fallback_to_original_planner = true """

sql "DROP TABLE IF EXISTS ${testTable}"

sql """
@@ -18,11 +18,6 @@
import org.codehaus.groovy.runtime.IOGroovyMethods

suite("test_jsonb_predict_is_null", "p0") {
sql """ set experimental_enable_nereids_planner = false """

sql """ set experimental_enable_nereids_planner = true """
sql """ set enable_fallback_to_original_planner = true """

sql "DROP TABLE IF EXISTS jb_pred"

sql """
@@ -24,9 +24,6 @@ suite("test_jsonb_with_unescaped_string", "p0") {
def dataFile = "test_jsonb_unescaped.csv"
def dataFileJson = "test_jsonb_unescaped.json"

sql """ set experimental_enable_nereids_planner = true """
sql """ set enable_fallback_to_original_planner = true """

sql "DROP TABLE IF EXISTS ${testTable}"

sql """
@@ -281,17 +281,10 @@ suite('nereids_insert_no_partition') {
sql '''delete from uni_light_sc_mow_nop_t where id is not null'''
sql '''delete from uni_light_sc_mow_nop_t where id is null'''

// TODO turn off fallback when storage layer support true predicate
sql '''set enable_fallback_to_original_planner=true'''
sql '''delete from uni_mow_not_null_nop_t where id is not null'''
sql '''set enable_fallback_to_original_planner=false'''

sql '''delete from uni_mow_not_null_nop_t where id is null'''

// TODO turn off fallback when storage layer support true predicate
sql '''set enable_fallback_to_original_planner=true'''
sql '''delete from uni_light_sc_mow_not_null_nop_t where id is not null'''
sql '''set enable_fallback_to_original_planner=false'''

sql '''delete from uni_light_sc_mow_not_null_nop_t where id is null'''
sql 'alter table agg_light_sc_nop_t rename column ktinyint ktint'
@@ -281,7 +281,6 @@ suite("nereids_partial_update_native_insert_stmt", "p0") {

sql "set enable_unique_key_partial_update=false;"
sql "set enable_insert_strict = false;"
sql "set enable_fallback_to_original_planner=true;"
sql "sync;"
}
}
@@ -127,7 +127,6 @@ suite("nereids_partial_update_native_insert_stmt_complex", "p0") {

sql "set enable_unique_key_partial_update=false;"
sql "set enable_insert_strict = false;"
sql "set enable_fallback_to_original_planner=true;"
sql "sync;"
}
}
@@ -19,12 +19,8 @@ suite("nereids_insert_unsupport_type") {
sql 'use nereids_insert_into_table_test'

sql 'set enable_nereids_planner=true'
sql 'set enable_fallback_to_original_planner=false'
sql 'set enable_nereids_dml=true'
sql 'set enable_strict_consistency_dml=true'

sql 'set enable_fallback_to_original_planner=true'

sql 'insert into map_t select id, kmintint from src'
sql 'sync'
sql 'select * from map_t'
@@ -17,7 +17,6 @@

suite("test_timeout_fallback") {
sql "set enable_nereids_planner=true"
sql "set enable_fallback_to_original_planner=true"
sql "set enable_nereids_timeout=true"
sql "set nereids_timeout_second=-1"

@@ -16,7 +16,10 @@
// under the License.

suite("valid_grouping"){
sql "SET enable_fallback_to_original_planner=true"

// this suite test legacy planner
sql "set enable_nereids_planner=false"

sql "drop table if exists valid_grouping"
sql """
CREATE TABLE `valid_grouping` (
@@ -18,8 +18,6 @@
suite("aggregate_strategies") {

def test_aggregate_strategies = { tableName, bucketNum ->
sql "SET enable_fallback_to_original_planner=true"

sql "drop table if exists $tableName"
sql """CREATE TABLE `$tableName` (
`id` int(11) NOT NULL,
@@ -40,10 +38,6 @@ suite("aggregate_strategies") {
sql "insert into $tableName select number, concat('name_', number) from numbers('number'='5')"
sql "insert into $tableName select number, concat('name_', number) from numbers('number'='5')"


sql "SET enable_nereids_planner=true"
sql "SET enable_fallback_to_original_planner=false"

order_qt_count_all "select count(ALL *) from $tableName"
order_qt_count_all "select count(*) from $tableName"
order_qt_count_all_group_by "select count(*) from $tableName group by id"
@@ -200,6 +194,5 @@

qt_sql_distinct_same_col """SELECT COUNT(DISTINCT id, id) FROM test_bucket10_table GROUP BY id """

sql "set experimental_enable_pipeline_engine=true"
qt_sql_distinct_same_col2 """SELECT COUNT(DISTINCT id, id) FROM test_bucket10_table GROUP BY id """
}