From af6144bd967e6263be192275be1ca09fd39625ad Mon Sep 17 00:00:00 2001
From: wudi <676366545@qq.com>
Date: Fri, 13 Sep 2024 11:18:50 +0800
Subject: [PATCH] remove CDC dependencies and test cases, some examples, and
 change the connector identifier

---
 flink-doris-connector/pom.xml | 82 ---
 .../doris/flink/table/DorisConfigOptions.java | 2 +-
 .../doris/flink/tools/cdc/CdcTools.java | 231 -------
 .../doris/flink/tools/cdc/DatabaseSync.java | 653 ------------------
 .../flink/tools/cdc/DatabaseSyncConfig.java | 97 ---
 .../flink/tools/cdc/JdbcSourceSchema.java | 100 ---
 .../tools/cdc/ParsingProcessFunction.java | 71 --
 .../flink/tools/cdc/db2/Db2DatabaseSync.java | 246 -------
 .../flink/tools/cdc/db2/Db2DateConverter.java | 133 ----
 .../doris/flink/tools/cdc/db2/Db2Schema.java | 54 --
 ...orisJsonDebeziumDeserializationSchema.java | 197 ------
 .../cdc/mongodb/ChangeStreamConstant.java | 42 --
 .../cdc/mongodb/MongoDBDatabaseSync.java | 220 ------
 .../tools/cdc/mongodb/MongoDBSchema.java | 145 ----
 .../flink/tools/cdc/mongodb/MongoDBType.java | 128 ----
 .../tools/cdc/mongodb/MongoDateConverter.java | 38 -
 .../mongodb/MongoParsingProcessFunction.java | 44 --
 .../MongoDBJsonDebeziumSchemaSerializer.java | 211 ------
 .../MongoJsonDebeziumDataChange.java | 163 -----
 .../MongoJsonDebeziumSchemaChange.java | 198 ------
 .../cdc/mysql/DateToStringConverter.java | 173 -----
 .../tools/cdc/mysql/MysqlDatabaseSync.java | 271 --------
 .../flink/tools/cdc/mysql/MysqlSchema.java | 40 --
 .../tools/cdc/oracle/OracleDatabaseSync.java | 262 -------
 .../tools/cdc/oracle/OracleDateConverter.java | 151 ----
 .../flink/tools/cdc/oracle/OracleSchema.java | 60 --
 .../cdc/postgres/PostgresDatabaseSync.java | 252 -------
 .../cdc/postgres/PostgresDateConverter.java | 133 ----
 .../tools/cdc/postgres/PostgresSchema.java | 45 --
 .../cdc/sqlserver/SqlServerDatabaseSync.java | 229 ------
 .../cdc/sqlserver/SqlServerDateConverter.java | 113 ---
 .../tools/cdc/sqlserver/SqlServerSchema.java | 45 --
 .../flink/container/AbstractE2EService.java | 155 -----
 .../container/e2e/Mysql2DorisE2ECase.java | 393 -----------
 .../flink/example/CDCSchemaChangeExample.java | 97 ---
 .../doris/flink/example/CatalogExample.java | 50 --
 .../example/DorisDateAndTimestampSqlTest.java | 71 --
 .../DorisIntranetAccessSinkExample.java | 111 ---
 .../example/DorisSinkArraySQLExample.java | 138 ----
 .../flink/example/DorisSinkBatchExample.java | 161 -----
 .../doris/flink/example/DorisSinkExample.java | 92 ---
 .../example/DorisSinkExampleRowData.java | 111 ---
 .../example/DorisSinkMultiTableExample.java | 115 ---
 .../flink/example/DorisSinkSQLExample.java | 63 --
 .../DorisSinkStreamMultiTableExample.java | 120 ----
 .../flink/example/DorisSourceDataStream.java | 46 --
 .../flink/example/DorisSourceExample.java | 64 --
 .../flink/example/DorisSourceSinkExample.java | 145 ----
 .../flink/example/LookupJoinCdcExample.java | 83 ---
 .../flink/example/LookupJoinExample.java | 93 ---
 .../tools/cdc/CdcDb2SyncDatabaseCase.java | 100 ---
 .../tools/cdc/CdcMongoSyncDatabaseCase.java | 97 ---
 .../tools/cdc/CdcMysqlSyncDatabaseCase.java | 105 ---
 .../tools/cdc/CdcOraclelSyncDatabaseCase.java | 95 ---
 .../cdc/CdcPostgresSyncDatabaseCase.java | 99 ---
 .../cdc/CdcSqlServerSyncDatabaseCase.java | 98 ---
 .../doris/flink/tools/cdc/CdcToolsTest.java | 118 ----
 .../flink/tools/cdc/DatabaseSyncTest.java | 269 --------
 .../tools/cdc/mongodb/MongoDBSchemaTest.java | 159 -----
 .../tools/cdc/mongodb/MongoDBTypeTest.java | 139 ----
 .../cdc/mongodb/MongoDateConverterTest.java | 32 -
 .../MongoParsingProcessFunctionTest.java | 35 -
 .../flink/utils/DateToStringConverter.java | 155 -----
 63 files changed, 1 insertion(+), 8437 deletions(-)
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/CdcTools.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSync.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSyncConfig.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/JdbcSourceSchema.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/ParsingProcessFunction.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DatabaseSync.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DateConverter.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2Schema.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/deserialize/DorisJsonDebeziumDeserializationSchema.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/ChangeStreamConstant.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBDatabaseSync.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchema.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBType.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverter.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunction.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoDBJsonDebeziumSchemaSerializer.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumDataChange.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumSchemaChange.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/DateToStringConverter.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlDatabaseSync.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlSchema.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDatabaseSync.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDateConverter.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleSchema.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDatabaseSync.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDateConverter.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresSchema.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDatabaseSync.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDateConverter.java
 delete mode 100644 flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerSchema.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/container/AbstractE2EService.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/container/e2e/Mysql2DorisE2ECase.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/CDCSchemaChangeExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/CatalogExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisDateAndTimestampSqlTest.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisIntranetAccessSinkExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkArraySQLExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkBatchExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExampleRowData.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkMultiTableExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkSQLExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkStreamMultiTableExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceDataStream.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceSinkExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinCdcExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinExample.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcDb2SyncDatabaseCase.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMongoSyncDatabaseCase.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMysqlSyncDatabaseCase.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcOraclelSyncDatabaseCase.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcPostgresSyncDatabaseCase.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcSqlServerSyncDatabaseCase.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcToolsTest.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/DatabaseSyncTest.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchemaTest.java
 delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBTypeTest.java
 delete mode 100644
flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverterTest.java delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunctionTest.java delete mode 100644 flink-doris-connector/src/test/java/org/apache/doris/flink/utils/DateToStringConverter.java diff --git a/flink-doris-connector/pom.xml b/flink-doris-connector/pom.xml index d773339b3..2cdfbe810 100644 --- a/flink-doris-connector/pom.xml +++ b/flink-doris-connector/pom.xml @@ -277,94 +277,12 @@ under the License. ${log4j.version} test - - - org.apache.flink - flink-sql-connector-mysql-cdc - ${flink.sql.cdc.version} - provided - - - flink-shaded-guava - org.apache.flink - - - - - org.apache.flink - flink-sql-connector-oracle-cdc - ${flink.sql.cdc.version} - provided - - - flink-shaded-guava - org.apache.flink - - - - - org.apache.flink - flink-sql-connector-postgres-cdc - ${flink.sql.cdc.version} - provided - - - flink-shaded-guava - org.apache.flink - - - - - org.apache.flink - flink-sql-connector-sqlserver-cdc - ${flink.sql.cdc.version} - provided - - - flink-shaded-guava - org.apache.flink - - - - - org.apache.flink - flink-sql-connector-db2-cdc - ${flink.sql.cdc.version} - provided - - - flink-shaded-guava - org.apache.flink - - - - - org.apache.flink - flink-sql-connector-mongodb-cdc - - ${flink.sql.cdc.version} - provided - - - flink-shaded-guava - org.apache.flink - - - - mysql mysql-connector-java ${mysql.driver.version} test - - com.oracle.ojdbc - ojdbc8 - ${ojdbc.version} - provided - - org.apache.flink flink-runtime-web diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/table/DorisConfigOptions.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/table/DorisConfigOptions.java index 2a1c9b1a4..3fbe57a37 100644 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/table/DorisConfigOptions.java +++ b/flink-doris-connector/src/main/java/org/apache/doris/flink/table/DorisConfigOptions.java @@ -43,7 +43,7 @@ @PublicEvolving public class DorisConfigOptions { - public static final String IDENTIFIER = "doris"; + public static final String IDENTIFIER = "selectdb-preview"; // common option public static final ConfigOption FENODES = ConfigOptions.key("fenodes") diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/CdcTools.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/CdcTools.java deleted file mode 100644 index 61beea194..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/CdcTools.java +++ /dev/null @@ -1,231 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.annotation.VisibleForTesting; -import org.apache.flink.api.java.utils.MultipleParameterTool; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.core.execution.JobClient; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.util.Preconditions; -import org.apache.flink.util.StringUtils; - -import org.apache.doris.flink.tools.cdc.db2.Db2DatabaseSync; -import org.apache.doris.flink.tools.cdc.mongodb.MongoDBDatabaseSync; -import org.apache.doris.flink.tools.cdc.mysql.MysqlDatabaseSync; -import org.apache.doris.flink.tools.cdc.oracle.OracleDatabaseSync; -import org.apache.doris.flink.tools.cdc.postgres.PostgresDatabaseSync; -import org.apache.doris.flink.tools.cdc.sqlserver.SqlServerDatabaseSync; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** cdc sync tools. */ -public class CdcTools { - private static final List EMPTY_KEYS = - Collections.singletonList(DatabaseSyncConfig.PASSWORD); - private static StreamExecutionEnvironment flinkEnvironmentForTesting; - private static JobClient jobClient; - - public static void main(String[] args) throws Exception { - System.out.println("Input args: " + Arrays.asList(args) + ".\n"); - String operation = args[0].toLowerCase(); - String[] opArgs = Arrays.copyOfRange(args, 1, args.length); - MultipleParameterTool params = MultipleParameterTool.fromArgs(opArgs); - switch (operation) { - case DatabaseSyncConfig.MYSQL_SYNC_DATABASE: - createMySQLSyncDatabase(params); - break; - case DatabaseSyncConfig.ORACLE_SYNC_DATABASE: - createOracleSyncDatabase(params); - break; - case DatabaseSyncConfig.POSTGRES_SYNC_DATABASE: - createPostgresSyncDatabase(params); - break; - case DatabaseSyncConfig.SQLSERVER_SYNC_DATABASE: - createSqlServerSyncDatabase(params); - break; - case DatabaseSyncConfig.MONGODB_SYNC_DATABASE: - createMongoDBSyncDatabase(params); - break; - case DatabaseSyncConfig.DB2_SYNC_DATABASE: - createDb2SyncDatabase(params); - break; - default: - System.out.println("Unknown operation " + operation); - System.exit(1); - } - } - - private static void createMySQLSyncDatabase(MultipleParameterTool params) throws Exception { - Preconditions.checkArgument(params.has(DatabaseSyncConfig.MYSQL_CONF)); - Map mysqlMap = getConfigMap(params, DatabaseSyncConfig.MYSQL_CONF); - Configuration mysqlConfig = Configuration.fromMap(mysqlMap); - DatabaseSync databaseSync = new MysqlDatabaseSync(); - syncDatabase(params, databaseSync, mysqlConfig, SourceConnector.MYSQL); - } - - private static void createOracleSyncDatabase(MultipleParameterTool params) throws Exception { - Preconditions.checkArgument(params.has(DatabaseSyncConfig.ORACLE_CONF)); - Map oracleMap = getConfigMap(params, DatabaseSyncConfig.ORACLE_CONF); - Configuration oracleConfig = Configuration.fromMap(oracleMap); - DatabaseSync databaseSync = new OracleDatabaseSync(); - syncDatabase(params, databaseSync, oracleConfig, SourceConnector.ORACLE); - } - - private static void createPostgresSyncDatabase(MultipleParameterTool params) throws Exception { - Preconditions.checkArgument(params.has(DatabaseSyncConfig.POSTGRES_CONF)); - Map postgresMap = getConfigMap(params, DatabaseSyncConfig.POSTGRES_CONF); - Configuration postgresConfig = Configuration.fromMap(postgresMap); - DatabaseSync databaseSync = new PostgresDatabaseSync(); - syncDatabase(params, 
databaseSync, postgresConfig, SourceConnector.POSTGRES); - } - - private static void createSqlServerSyncDatabase(MultipleParameterTool params) throws Exception { - Preconditions.checkArgument(params.has(DatabaseSyncConfig.SQLSERVER_CONF)); - Map postgresMap = getConfigMap(params, DatabaseSyncConfig.SQLSERVER_CONF); - Configuration postgresConfig = Configuration.fromMap(postgresMap); - DatabaseSync databaseSync = new SqlServerDatabaseSync(); - syncDatabase(params, databaseSync, postgresConfig, SourceConnector.SQLSERVER); - } - - private static void createMongoDBSyncDatabase(MultipleParameterTool params) throws Exception { - Preconditions.checkArgument(params.has(DatabaseSyncConfig.MONGODB_CONF)); - Map mongoMap = getConfigMap(params, DatabaseSyncConfig.MONGODB_CONF); - Configuration mongoConfig = Configuration.fromMap(mongoMap); - DatabaseSync databaseSync = new MongoDBDatabaseSync(); - syncDatabase(params, databaseSync, mongoConfig, SourceConnector.MONGODB); - } - - private static void createDb2SyncDatabase(MultipleParameterTool params) throws Exception { - Preconditions.checkArgument(params.has(DatabaseSyncConfig.DB2_CONF)); - Map db2Map = getConfigMap(params, DatabaseSyncConfig.DB2_CONF); - Configuration db2Config = Configuration.fromMap(db2Map); - DatabaseSync databaseSync = new Db2DatabaseSync(); - syncDatabase(params, databaseSync, db2Config, SourceConnector.DB2); - } - - private static void syncDatabase( - MultipleParameterTool params, - DatabaseSync databaseSync, - Configuration config, - SourceConnector sourceConnector) - throws Exception { - String jobName = params.get(DatabaseSyncConfig.JOB_NAME); - String database = params.get(DatabaseSyncConfig.DATABASE); - String tablePrefix = params.get(DatabaseSyncConfig.TABLE_PREFIX); - String tableSuffix = params.get(DatabaseSyncConfig.TABLE_SUFFIX); - String includingTables = params.get(DatabaseSyncConfig.INCLUDING_TABLES); - String excludingTables = params.get(DatabaseSyncConfig.EXCLUDING_TABLES); - String multiToOneOrigin = params.get(DatabaseSyncConfig.MULTI_TO_ONE_ORIGIN); - String multiToOneTarget = params.get(DatabaseSyncConfig.MULTI_TO_ONE_TARGET); - String schemaChangeMode = params.get(DatabaseSyncConfig.SCHEMA_CHANGE_MODE); - boolean createTableOnly = params.has(DatabaseSyncConfig.CREATE_TABLE_ONLY); - boolean ignoreDefaultValue = params.has(DatabaseSyncConfig.IGNORE_DEFAULT_VALUE); - boolean ignoreIncompatible = params.has(DatabaseSyncConfig.IGNORE_INCOMPATIBLE); - boolean singleSink = params.has(DatabaseSyncConfig.SINGLE_SINK); - - Preconditions.checkArgument(params.has(DatabaseSyncConfig.SINK_CONF)); - Map sinkMap = getConfigMap(params, DatabaseSyncConfig.SINK_CONF); - DorisTableConfig tableConfig = - new DorisTableConfig(getConfigMap(params, DatabaseSyncConfig.TABLE_CONF)); - Configuration sinkConfig = Configuration.fromMap(sinkMap); - - StreamExecutionEnvironment env = - Objects.nonNull(flinkEnvironmentForTesting) - ? 
flinkEnvironmentForTesting - : StreamExecutionEnvironment.getExecutionEnvironment(); - databaseSync - .setEnv(env) - .setDatabase(database) - .setConfig(config) - .setTablePrefix(tablePrefix) - .setTableSuffix(tableSuffix) - .setIncludingTables(includingTables) - .setExcludingTables(excludingTables) - .setMultiToOneOrigin(multiToOneOrigin) - .setMultiToOneTarget(multiToOneTarget) - .setIgnoreDefaultValue(ignoreDefaultValue) - .setSinkConfig(sinkConfig) - .setTableConfig(tableConfig) - .setCreateTableOnly(createTableOnly) - .setSingleSink(singleSink) - .setIgnoreIncompatible(ignoreIncompatible) - .setSchemaChangeMode(schemaChangeMode) - .create(); - databaseSync.build(); - if (StringUtils.isNullOrWhitespaceOnly(jobName)) { - jobName = - String.format( - "%s-Doris Sync Database: %s", - sourceConnector.getConnectorName(), - config.getString( - DatabaseSyncConfig.DATABASE_NAME, DatabaseSyncConfig.DB)); - } - if (Objects.nonNull(flinkEnvironmentForTesting)) { - jobClient = env.executeAsync(); - } else { - env.execute(jobName); - } - } - - @VisibleForTesting - public static JobClient getJobClient() { - return jobClient; - } - - // Only for testing, please do not use it in actual environment - @VisibleForTesting - public static void setStreamExecutionEnvironmentForTesting( - StreamExecutionEnvironment environment) { - flinkEnvironmentForTesting = environment; - } - - @VisibleForTesting - public static Map getConfigMap(MultipleParameterTool params, String key) { - if (!params.has(key)) { - System.out.println( - "Can not find key [" - + key - + "] from args: " - + params.toMap().toString() - + ".\n"); - return null; - } - - Map map = new HashMap<>(); - for (String param : params.getMultiParameter(key)) { - String[] kv = param.split("=", 2); - if (kv.length == 2) { - map.put(kv[0].trim(), kv[1].trim()); - continue; - } else if (kv.length == 1 && EMPTY_KEYS.contains(kv[0])) { - map.put(kv[0].trim(), ""); - continue; - } - - System.out.println("Invalid " + key + " " + param + ".\n"); - return null; - } - return map; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSync.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSync.java deleted file mode 100644 index 7cd29506e..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSync.java +++ /dev/null @@ -1,653 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.datastream.DataStream; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.util.CollectionUtil; -import org.apache.flink.util.OutputTag; -import org.apache.flink.util.Preconditions; -import org.apache.flink.util.StringUtils; - -import org.apache.doris.flink.catalog.doris.DorisSchemaFactory; -import org.apache.doris.flink.catalog.doris.DorisSystem; -import org.apache.doris.flink.catalog.doris.TableSchema; -import org.apache.doris.flink.cfg.DorisConnectionOptions; -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.cfg.DorisReadOptions; -import org.apache.doris.flink.exception.DorisSystemException; -import org.apache.doris.flink.sink.DorisSink; -import org.apache.doris.flink.sink.schema.SchemaChangeMode; -import org.apache.doris.flink.sink.writer.WriteMode; -import org.apache.doris.flink.sink.writer.serializer.DorisRecordSerializer; -import org.apache.doris.flink.sink.writer.serializer.JsonDebeziumSchemaSerializer; -import org.apache.doris.flink.table.DorisConfigOptions; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.Serializable; -import java.sql.Connection; -import java.sql.SQLException; -import java.sql.SQLSyntaxErrorException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; -import java.util.regex.Pattern; - -import static org.apache.flink.cdc.debezium.utils.JdbcUrlUtils.PROPERTIES_PREFIX; - -public abstract class DatabaseSync { - private static final Logger LOG = LoggerFactory.getLogger(DatabaseSync.class); - private static final String TABLE_NAME_OPTIONS = "table-name"; - - protected Configuration config; - - protected String database; - - protected TableNameConverter converter; - protected Pattern includingPattern; - protected Pattern excludingPattern; - protected Map multiToOneRulesPattern; - protected DorisTableConfig dorisTableConfig; - protected Configuration sinkConfig; - protected boolean ignoreDefaultValue; - protected boolean ignoreIncompatible; - - public StreamExecutionEnvironment env; - private boolean createTableOnly = false; - private boolean newSchemaChange = true; - private String schemaChangeMode; - protected String includingTables; - protected String excludingTables; - protected String multiToOneOrigin; - protected String multiToOneTarget; - protected String tablePrefix; - protected String tableSuffix; - protected boolean singleSink; - protected final Map tableMapping = new HashMap<>(); - - public abstract void registerDriver() throws SQLException; - - public abstract Connection getConnection() throws SQLException; - - public abstract List getSchemaList() throws Exception; - - public abstract DataStreamSource buildCdcSource(StreamExecutionEnvironment env); - - /** Get the prefix of a specific tableList, for example, mysql is database, oracle is schema. 
*/ - public abstract String getTableListPrefix(); - - protected DatabaseSync() throws SQLException { - registerDriver(); - } - - public void create() { - this.includingPattern = includingTables == null ? null : Pattern.compile(includingTables); - this.excludingPattern = excludingTables == null ? null : Pattern.compile(excludingTables); - this.multiToOneRulesPattern = multiToOneRulesParser(multiToOneOrigin, multiToOneTarget); - this.converter = new TableNameConverter(tablePrefix, tableSuffix, multiToOneRulesPattern); - } - - public void build() throws Exception { - DorisConnectionOptions options = getDorisConnectionOptions(); - DorisSystem dorisSystem = new DorisSystem(options); - - List schemaList = getSchemaList(); - Preconditions.checkState( - !schemaList.isEmpty(), - "No tables to be synchronized. Please make sure whether the tables that need to be synchronized exist in the corresponding database or schema."); - - if (!StringUtils.isNullOrWhitespaceOnly(database) - && !dorisSystem.databaseExists(database)) { - LOG.info("database {} not exist, created", database); - dorisSystem.createDatabase(database); - } - List syncTables = new ArrayList<>(); - List> dorisTables = new ArrayList<>(); - - Set targetDbSet = new HashSet<>(); - for (SourceSchema schema : schemaList) { - syncTables.add(schema.getTableName()); - String targetDb = database; - // Synchronize multiple databases using the src database name - if (StringUtils.isNullOrWhitespaceOnly(targetDb)) { - targetDb = schema.getDatabaseName(); - targetDbSet.add(targetDb); - } - if (StringUtils.isNullOrWhitespaceOnly(database) - && !dorisSystem.databaseExists(targetDb)) { - LOG.info("database {} not exist, created", targetDb); - dorisSystem.createDatabase(targetDb); - } - String dorisTable = converter.convert(schema.getTableName()); - // Calculate the mapping relationship between upstream and downstream tables - tableMapping.put( - schema.getTableIdentifier(), String.format("%s.%s", targetDb, dorisTable)); - tryCreateTableIfAbsent(dorisSystem, targetDb, dorisTable, schema); - - if (!dorisTables.contains(Tuple2.of(targetDb, dorisTable))) { - dorisTables.add(Tuple2.of(targetDb, dorisTable)); - } - } - if (createTableOnly) { - System.out.println("Create table finished."); - System.exit(0); - } - LOG.info("table mapping: {}", tableMapping); - config.setString(TABLE_NAME_OPTIONS, getSyncTableList(syncTables)); - DataStreamSource streamSource = buildCdcSource(env); - if (singleSink) { - streamSource.sinkTo(buildDorisSink()); - } else { - SingleOutputStreamOperator parsedStream = - streamSource.process(buildProcessFunction()); - for (Tuple2 dbTbl : dorisTables) { - OutputTag recordOutputTag = - ParsingProcessFunction.createRecordOutputTag(dbTbl.f1); - DataStream sideOutput = parsedStream.getSideOutput(recordOutputTag); - int sinkParallel = - sinkConfig.getInteger( - DorisConfigOptions.SINK_PARALLELISM, sideOutput.getParallelism()); - String uidName = getUidName(targetDbSet, dbTbl); - sideOutput - .sinkTo(buildDorisSink(dbTbl.f0 + "." + dbTbl.f1)) - .setParallelism(sinkParallel) - .name(uidName) - .uid(uidName); - } - } - } - - /** - * @param targetDbSet The set of target databases. - * @param dbTbl The database-table tuple. - * @return The UID of the DataStream. - */ - public String getUidName(Set targetDbSet, Tuple2 dbTbl) { - String uidName; - // Determine whether to proceed with multi-database synchronization. - // if yes, the UID is composed of `dbname_tablename`, otherwise it is composed of - // `tablename`. 
- if (targetDbSet.size() > 1) { - uidName = dbTbl.f0 + "_" + dbTbl.f1; - } else { - uidName = dbTbl.f1; - } - - return uidName; - } - - private DorisConnectionOptions getDorisConnectionOptions() { - String fenodes = sinkConfig.getString(DorisConfigOptions.FENODES); - String benodes = sinkConfig.getString(DorisConfigOptions.BENODES); - String user = sinkConfig.getString(DorisConfigOptions.USERNAME); - String passwd = sinkConfig.getString(DorisConfigOptions.PASSWORD, ""); - String jdbcUrl = sinkConfig.getString(DorisConfigOptions.JDBC_URL); - Preconditions.checkNotNull(fenodes, "fenodes is empty in sink-conf"); - Preconditions.checkNotNull(user, "username is empty in sink-conf"); - Preconditions.checkNotNull(jdbcUrl, "jdbcurl is empty in sink-conf"); - DorisConnectionOptions.DorisConnectionOptionsBuilder builder = - new DorisConnectionOptions.DorisConnectionOptionsBuilder() - .withFenodes(fenodes) - .withBenodes(benodes) - .withUsername(user) - .withPassword(passwd) - .withJdbcUrl(jdbcUrl); - return builder.build(); - } - - /** create doris sink for multi table. */ - public DorisSink buildDorisSink() { - return buildDorisSink(null); - } - - public ParsingProcessFunction buildProcessFunction() { - return new ParsingProcessFunction(converter); - } - - /** create doris sink. */ - public DorisSink buildDorisSink(String tableIdentifier) { - String fenodes = sinkConfig.getString(DorisConfigOptions.FENODES); - String benodes = sinkConfig.getString(DorisConfigOptions.BENODES); - String user = sinkConfig.getString(DorisConfigOptions.USERNAME); - String passwd = sinkConfig.getString(DorisConfigOptions.PASSWORD, ""); - String jdbcUrl = sinkConfig.getString(DorisConfigOptions.JDBC_URL); - - DorisSink.Builder builder = DorisSink.builder(); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setJdbcUrl(jdbcUrl) - .setFenodes(fenodes) - .setBenodes(benodes) - .setUsername(user) - .setPassword(passwd); - sinkConfig - .getOptional(DorisConfigOptions.AUTO_REDIRECT) - .ifPresent(dorisBuilder::setAutoRedirect); - - // single sink not need table identifier - if (!singleSink && !StringUtils.isNullOrWhitespaceOnly(tableIdentifier)) { - dorisBuilder.setTableIdentifier(tableIdentifier); - } - - Properties pro = new Properties(); - // default json data format - pro.setProperty("format", "json"); - pro.setProperty("read_json_by_line", "true"); - // customer stream load properties - Properties streamLoadProp = DorisConfigOptions.getStreamLoadProp(sinkConfig.toMap()); - pro.putAll(streamLoadProp); - DorisExecutionOptions.Builder executionBuilder = - DorisExecutionOptions.builder().setStreamLoadProp(pro); - - sinkConfig - .getOptional(DorisConfigOptions.SINK_LABEL_PREFIX) - .ifPresent(executionBuilder::setLabelPrefix); - sinkConfig - .getOptional(DorisConfigOptions.SINK_ENABLE_DELETE) - .ifPresent(executionBuilder::setDeletable); - sinkConfig - .getOptional(DorisConfigOptions.SINK_BUFFER_COUNT) - .ifPresent(executionBuilder::setBufferCount); - sinkConfig - .getOptional(DorisConfigOptions.SINK_BUFFER_SIZE) - .ifPresent(v -> executionBuilder.setBufferSize((int) v.getBytes())); - sinkConfig - .getOptional(DorisConfigOptions.SINK_CHECK_INTERVAL) - .ifPresent(v -> executionBuilder.setCheckInterval((int) v.toMillis())); - sinkConfig - .getOptional(DorisConfigOptions.SINK_MAX_RETRIES) - .ifPresent(executionBuilder::setMaxRetries); - sinkConfig - .getOptional(DorisConfigOptions.SINK_IGNORE_UPDATE_BEFORE) - .ifPresent(executionBuilder::setIgnoreUpdateBefore); - - if 
(!sinkConfig.getBoolean(DorisConfigOptions.SINK_ENABLE_2PC)) { - executionBuilder.disable2PC(); - } else if (sinkConfig.getOptional(DorisConfigOptions.SINK_ENABLE_2PC).isPresent()) { - // force open 2pc - executionBuilder.enable2PC(); - } - - sinkConfig - .getOptional(DorisConfigOptions.SINK_ENABLE_BATCH_MODE) - .ifPresent(executionBuilder::setBatchMode); - sinkConfig - .getOptional(DorisConfigOptions.SINK_FLUSH_QUEUE_SIZE) - .ifPresent(executionBuilder::setFlushQueueSize); - sinkConfig - .getOptional(DorisConfigOptions.SINK_BUFFER_FLUSH_MAX_ROWS) - .ifPresent(executionBuilder::setBufferFlushMaxRows); - sinkConfig - .getOptional(DorisConfigOptions.SINK_BUFFER_FLUSH_MAX_BYTES) - .ifPresent(v -> executionBuilder.setBufferFlushMaxBytes((int) v.getBytes())); - sinkConfig - .getOptional(DorisConfigOptions.SINK_BUFFER_FLUSH_INTERVAL) - .ifPresent(v -> executionBuilder.setBufferFlushIntervalMs(v.toMillis())); - - sinkConfig - .getOptional(DorisConfigOptions.SINK_USE_CACHE) - .ifPresent(executionBuilder::setUseCache); - sinkConfig - .getOptional(DorisConfigOptions.SINK_WRITE_MODE) - .ifPresent(v -> executionBuilder.setWriteMode(WriteMode.of(v))); - sinkConfig - .getOptional(DorisConfigOptions.SINK_IGNORE_COMMIT_ERROR) - .ifPresent(executionBuilder::setIgnoreCommitError); - - DorisExecutionOptions executionOptions = executionBuilder.build(); - builder.setDorisReadOptions(DorisReadOptions.builder().build()) - .setDorisExecutionOptions(executionOptions) - .setSerializer(buildSchemaSerializer(dorisBuilder, executionOptions)) - .setDorisOptions(dorisBuilder.build()); - return builder.build(); - } - - public DorisRecordSerializer buildSchemaSerializer( - DorisOptions.Builder dorisBuilder, DorisExecutionOptions executionOptions) { - return JsonDebeziumSchemaSerializer.builder() - .setDorisOptions(dorisBuilder.build()) - .setNewSchemaChange(newSchemaChange) - .setSchemaChangeMode(schemaChangeMode) - .setExecutionOptions(executionOptions) - .setTableMapping(tableMapping) - .setDorisTableConf(dorisTableConfig) - .setTargetDatabase(database) - .setTargetTablePrefix(tablePrefix) - .setTargetTableSuffix(tableSuffix) - .build(); - } - - /** Filter table that need to be synchronized. */ - protected boolean isSyncNeeded(String tableName) { - boolean sync = true; - if (includingPattern != null) { - sync = includingPattern.matcher(tableName).matches(); - } - if (excludingPattern != null) { - sync = sync && !excludingPattern.matcher(tableName).matches(); - } - LOG.debug("table {} is synchronized? {}", tableName, sync); - return sync; - } - - protected String getSyncTableList(List syncTables) { - if (!singleSink) { - return String.format("(%s)\\.(%s)", getTableListPrefix(), String.join("|", syncTables)); - } else { - // includingTablePattern and ^excludingPattern - if (includingTables == null) { - includingTables = ".*"; - } - String includingPattern = - String.format("(%s)\\.(%s)", getTableListPrefix(), includingTables); - if (StringUtils.isNullOrWhitespaceOnly(excludingTables)) { - return includingPattern; - } else { - String excludingPattern = - String.format("?!(%s\\.(%s))$", getTableListPrefix(), excludingTables); - return String.format("(%s)(%s)", excludingPattern, includingPattern); - } - } - } - - /** Filter table that many tables merge to one. 
*/ - protected HashMap multiToOneRulesParser( - String multiToOneOrigin, String multiToOneTarget) { - if (StringUtils.isNullOrWhitespaceOnly(multiToOneOrigin) - || StringUtils.isNullOrWhitespaceOnly(multiToOneTarget)) { - return null; - } - HashMap multiToOneRulesPattern = new HashMap<>(); - String[] origins = multiToOneOrigin.split("\\|"); - String[] targets = multiToOneTarget.split("\\|"); - if (origins.length != targets.length) { - System.out.println( - "param error : multi to one params length are not equal,please check your params."); - System.exit(1); - } - try { - for (int i = 0; i < origins.length; i++) { - multiToOneRulesPattern.put(Pattern.compile(origins[i]), targets[i]); - } - } catch (Exception e) { - System.out.println("param error : Your regular expression is incorrect,please check."); - System.exit(1); - } - return multiToOneRulesPattern; - } - - /** - * Get table buckets Map. - * - * @param tableBuckets the string of tableBuckets, eg:student:10,student_info:20,student.*:30 - * @return The table name and buckets map. The key is table name, the value is buckets. - */ - @Deprecated - public static Map getTableBuckets(String tableBuckets) { - Map tableBucketsMap = new LinkedHashMap<>(); - String[] tableBucketsArray = tableBuckets.split(","); - for (String tableBucket : tableBucketsArray) { - String[] tableBucketArray = tableBucket.split(":"); - tableBucketsMap.put( - tableBucketArray[0].trim(), Integer.parseInt(tableBucketArray[1].trim())); - } - return tableBucketsMap; - } - - /** - * Set table schema buckets. - * - * @param tableBucketsMap The table name and buckets map. The key is table name, the value is - * buckets. - * @param dorisSchema @{TableSchema} - * @param dorisTable the table name need to set buckets - * @param tableHasSet The buckets table is set - */ - @Deprecated - public void setTableSchemaBuckets( - Map tableBucketsMap, - TableSchema dorisSchema, - String dorisTable, - Set tableHasSet) { - - if (tableBucketsMap != null) { - // Firstly, if the table name is in the table-buckets map, set the buckets of the table. - if (tableBucketsMap.containsKey(dorisTable)) { - dorisSchema.setTableBuckets(tableBucketsMap.get(dorisTable)); - tableHasSet.add(dorisTable); - return; - } - // Secondly, iterate over the map to find a corresponding regular expression match, - for (Map.Entry entry : tableBucketsMap.entrySet()) { - if (tableHasSet.contains(entry.getKey())) { - continue; - } - - Pattern pattern = Pattern.compile(entry.getKey()); - if (pattern.matcher(dorisTable).matches()) { - dorisSchema.setTableBuckets(entry.getValue()); - tableHasSet.add(dorisTable); - return; - } - } - } - } - - private void tryCreateTableIfAbsent( - DorisSystem dorisSystem, String targetDb, String dorisTable, SourceSchema schema) { - if (!dorisSystem.tableExists(targetDb, dorisTable)) { - TableSchema dorisSchema = - DorisSchemaFactory.createTableSchema( - database, - dorisTable, - schema.getFields(), - schema.getPrimaryKeys(), - dorisTableConfig, - schema.getTableComment()); - try { - dorisSystem.createTable(dorisSchema); - } catch (Exception ex) { - handleTableCreationFailure(ex); - } - } - } - - private void handleTableCreationFailure(Exception ex) throws DorisSystemException { - if (ignoreIncompatible && ex.getCause() instanceof SQLSyntaxErrorException) { - LOG.warn( - "Doris schema and source table schema are not compatible. 
Error: {} ", - ex.getCause().toString()); - } else { - throw new DorisSystemException("Failed to create table due to: ", ex); - } - } - - protected Properties getJdbcProperties() { - Properties jdbcProps = new Properties(); - for (Map.Entry entry : config.toMap().entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (key.startsWith(PROPERTIES_PREFIX)) { - jdbcProps.put(key.substring(PROPERTIES_PREFIX.length()), value); - } - } - return jdbcProps; - } - - protected String getJdbcUrlTemplate(String initialJdbcUrl, Properties jdbcProperties) { - StringBuilder jdbcUrlBuilder = new StringBuilder(initialJdbcUrl); - jdbcProperties.forEach( - (key, value) -> jdbcUrlBuilder.append("&").append(key).append("=").append(value)); - return jdbcUrlBuilder.toString(); - } - - public DatabaseSync setEnv(StreamExecutionEnvironment env) { - this.env = env; - return this; - } - - public DatabaseSync setConfig(Configuration config) { - this.config = config; - return this; - } - - public DatabaseSync setDatabase(String database) { - this.database = database; - return this; - } - - public DatabaseSync setIncludingTables(String includingTables) { - this.includingTables = includingTables; - return this; - } - - public DatabaseSync setExcludingTables(String excludingTables) { - this.excludingTables = excludingTables; - return this; - } - - public DatabaseSync setMultiToOneOrigin(String multiToOneOrigin) { - this.multiToOneOrigin = multiToOneOrigin; - return this; - } - - public DatabaseSync setMultiToOneTarget(String multiToOneTarget) { - this.multiToOneTarget = multiToOneTarget; - return this; - } - - @Deprecated - public DatabaseSync setTableConfig(Map tableConfig) { - if (!CollectionUtil.isNullOrEmpty(tableConfig)) { - this.dorisTableConfig = new DorisTableConfig(tableConfig); - } - return this; - } - - public DatabaseSync setTableConfig(DorisTableConfig tableConfig) { - this.dorisTableConfig = tableConfig; - return this; - } - - public DatabaseSync setSinkConfig(Configuration sinkConfig) { - this.sinkConfig = sinkConfig; - return this; - } - - public DatabaseSync setIgnoreDefaultValue(boolean ignoreDefaultValue) { - this.ignoreDefaultValue = ignoreDefaultValue; - return this; - } - - public DatabaseSync setCreateTableOnly(boolean createTableOnly) { - this.createTableOnly = createTableOnly; - return this; - } - - public DatabaseSync setNewSchemaChange(boolean newSchemaChange) { - this.newSchemaChange = newSchemaChange; - return this; - } - - public DatabaseSync setSchemaChangeMode(String schemaChangeMode) { - if (org.apache.commons.lang3.StringUtils.isEmpty(schemaChangeMode)) { - this.schemaChangeMode = SchemaChangeMode.DEBEZIUM_STRUCTURE.getName(); - return this; - } - this.schemaChangeMode = schemaChangeMode.trim(); - return this; - } - - public DatabaseSync setSingleSink(boolean singleSink) { - this.singleSink = singleSink; - return this; - } - - public DatabaseSync setIgnoreIncompatible(boolean ignoreIncompatible) { - this.ignoreIncompatible = ignoreIncompatible; - return this; - } - - public DatabaseSync setTablePrefix(String tablePrefix) { - this.tablePrefix = tablePrefix; - return this; - } - - public DatabaseSync setTableSuffix(String tableSuffix) { - this.tableSuffix = tableSuffix; - return this; - } - - public static class TableNameConverter implements Serializable { - private static final long serialVersionUID = 1L; - private final String prefix; - private final String suffix; - private Map multiToOneRulesPattern; - - TableNameConverter() { - this("", ""); - } - - 
TableNameConverter(String prefix, String suffix) { - this.prefix = prefix == null ? "" : prefix; - this.suffix = suffix == null ? "" : suffix; - } - - TableNameConverter( - String prefix, String suffix, Map multiToOneRulesPattern) { - this.prefix = prefix == null ? "" : prefix; - this.suffix = suffix == null ? "" : suffix; - this.multiToOneRulesPattern = multiToOneRulesPattern; - } - - public String convert(String tableName) { - if (multiToOneRulesPattern == null) { - return prefix + tableName + suffix; - } - - String target = null; - - for (Map.Entry patternStringEntry : - multiToOneRulesPattern.entrySet()) { - if (patternStringEntry.getKey().matcher(tableName).matches()) { - target = patternStringEntry.getValue(); - } - } - /** - * If multiToOneRulesPattern is not null and target is not assigned, then the - * synchronization task contains both multi to one and one to one , prefixes and - * suffixes are added to common one-to-one mapping tables - */ - if (target == null) { - return prefix + tableName + suffix; - } - return target; - } - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSyncConfig.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSyncConfig.java deleted file mode 100644 index e1a089ff5..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/DatabaseSyncConfig.java +++ /dev/null @@ -1,97 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc; - -public class DatabaseSyncConfig { - - public static final String MYSQL_SYNC_DATABASE = "mysql-sync-database"; - public static final String ORACLE_SYNC_DATABASE = "oracle-sync-database"; - public static final String POSTGRES_SYNC_DATABASE = "postgres-sync-database"; - public static final String SQLSERVER_SYNC_DATABASE = "sqlserver-sync-database"; - public static final String MONGODB_SYNC_DATABASE = "mongodb-sync-database"; - public static final String DB2_SYNC_DATABASE = "db2-sync-database"; - - public static final String MYSQL_CONF = "mysql-conf"; - public static final String ORACLE_CONF = "oracle-conf"; - public static final String POSTGRES_CONF = "postgres-conf"; - public static final String SQLSERVER_CONF = "sqlserver-conf"; - public static final String MONGODB_CONF = "mongodb-conf"; - public static final String DB2_CONF = "db2-conf"; - - ///////////// source-conf //////// - public static final String DATABASE_NAME = "database-name"; - public static final String DB = "db"; - public static final String PORT = "port"; - public static final String USER = "user"; - public static final String PASSWORD = "password"; - public static final String TABLE_NAME = "TABLE_NAME"; - public static final String REMARKS = "REMARKS"; - - ////////// cdc-conf ////////// - // config options of {@link - // org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_STARTUP_MODE} - public static final String SCAN_STARTUP_MODE_VALUE_INITIAL = "initial"; - public static final String SCAN_STARTUP_MODE_VALUE_EARLIEST_OFFSET = "earliest-offset"; - public static final String SCAN_STARTUP_MODE_VALUE_SPECIFIC_OFFSET = "specific-offset"; - public static final String SCAN_STARTUP_MODE_VALUE_LATEST_OFFSET = "latest-offset"; - public static final String SCAN_STARTUP_MODE_VALUE_TIMESTAMP = "timestamp"; - public static final String DECIMAL_HANDLING_MODE = "decimal.handling.mode"; - - ////////// sink-conf ///////////// - public static final String SINK_CONF = "sink-conf"; - public static final String JOB_NAME = "job-name"; - public static final String DATABASE = "database"; - public static final String TABLE_PREFIX = "table-prefix"; - public static final String TABLE_SUFFIX = "table-suffix"; - public static final String INCLUDING_TABLES = "including-tables"; - public static final String EXCLUDING_TABLES = "excluding-tables"; - public static final String MULTI_TO_ONE_ORIGIN = "multi-to-one-origin"; - public static final String MULTI_TO_ONE_TARGET = "multi-to-one-target"; - public static final String SCHEMA_CHANGE_MODE = "schema-change-mode"; - public static final String CREATE_TABLE_ONLY = "create-table-only"; - public static final String IGNORE_DEFAULT_VALUE = "ignore-default-value"; - public static final String IGNORE_INCOMPATIBLE = "ignore-incompatible"; - public static final String SINGLE_SINK = "single-sink"; - ////////// doris-table-conf ////////// - public static final String TABLE_CONF = "table-conf"; - - ////////// date-converter-conf ////////// - public static final String CONVERTERS = "converters"; - public static final String DATE = "date"; - public static final String DATE_TYPE = "date.type"; - public static final String DATE_FORMAT_DATE = "date.format.date"; - public static final String DATE_FORMAT_DATETIME = "date.format.datetime"; - public static final String DATE_FORMAT_TIMESTAMP = "date.format.timestamp"; - public static final String DATE_FORMAT_TIMESTAMP_ZONE = "date.format.timestamp.zone"; - public static final String YEAR_MONTH_DAY_FORMAT = "yyyy-MM-dd"; 
- public static final String DATE_TIME_FORMAT = "yyyy-MM-dd HH:mm:ss"; - public static final String DATETIME_MICRO_FORMAT = "yyyy-MM-dd HH:mm:ss.SSSSSS"; - public static final String TIME_ZONE_SHANGHAI = "Asia/Shanghai"; - public static final String TIME_ZONE_UTC_8 = "UTC+8"; - public static final String FORMAT_DATE = "format.date"; - public static final String FORMAT_TIME = "format.time"; - public static final String FORMAT_DATETIME = "format.datetime"; - public static final String FORMAT_TIMESTAMP = "format.timestamp"; - public static final String FORMAT_TIMESTAMP_ZONE = "format.timestamp.zone"; - public static final String UPPERCASE_DATE = "DATE"; - public static final String TIME = "TIME"; - public static final String DATETIME = "DATETIME"; - public static final String TIMESTAMP = "TIMESTAMP"; - public static final String SMALLDATETIME = "SMALLDATETIME"; - public static final String DATETIME2 = "DATETIME2"; -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/JdbcSourceSchema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/JdbcSourceSchema.java deleted file mode 100644 index 31cfd1cbf..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/JdbcSourceSchema.java +++ /dev/null @@ -1,100 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.util.Preconditions; - -import org.apache.doris.flink.catalog.doris.FieldSchema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.DatabaseMetaData; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.LinkedHashMap; -import java.util.List; - -/** - * JdbcSourceSchema is a subclass of SourceSchema, used to build metadata about jdbc-related - * databases. 
- */ -public abstract class JdbcSourceSchema extends SourceSchema { - private static final Logger LOG = LoggerFactory.getLogger(JdbcSourceSchema.class); - - public JdbcSourceSchema( - DatabaseMetaData metaData, - String databaseName, - String schemaName, - String tableName, - String tableComment) - throws Exception { - super(databaseName, schemaName, tableName, tableComment); - fields = getColumnInfo(metaData, databaseName, schemaName, tableName); - primaryKeys = getPrimaryKeys(metaData, databaseName, schemaName, tableName); - } - - public LinkedHashMap getColumnInfo( - DatabaseMetaData metaData, String databaseName, String schemaName, String tableName) - throws SQLException { - LinkedHashMap fields = new LinkedHashMap<>(); - LOG.debug("Starting to get column info for table: {}", tableName); - try (ResultSet rs = metaData.getColumns(databaseName, schemaName, tableName, null)) { - while (rs.next()) { - String fieldName = rs.getString("COLUMN_NAME"); - String comment = rs.getString("REMARKS"); - String fieldType = rs.getString("TYPE_NAME"); - Integer precision = rs.getInt("COLUMN_SIZE"); - - if (rs.wasNull()) { - precision = null; - } - Integer scale = rs.getInt("DECIMAL_DIGITS"); - if (rs.wasNull()) { - scale = null; - } - String dorisTypeStr = null; - try { - dorisTypeStr = convertToDorisType(fieldType, precision, scale); - } catch (UnsupportedOperationException e) { - throw new UnsupportedOperationException(e + " in table: " + tableName); - } - fields.put(fieldName, new FieldSchema(fieldName, dorisTypeStr, comment)); - } - } - Preconditions.checkArgument(!fields.isEmpty(), "The column info of {} is empty", tableName); - LOG.debug("Successfully retrieved column info for table: {}", tableName); - return fields; - } - - public List getPrimaryKeys( - DatabaseMetaData metaData, String databaseName, String schemaName, String tableName) - throws SQLException { - List primaryKeys = new ArrayList<>(); - try (ResultSet rs = metaData.getPrimaryKeys(databaseName, schemaName, tableName)) { - while (rs.next()) { - String fieldName = rs.getString("COLUMN_NAME"); - primaryKeys.add(fieldName); - } - } - - return primaryKeys; - } - - public abstract String convertToDorisType(String fieldType, Integer precision, Integer scale); -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/ParsingProcessFunction.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/ParsingProcessFunction.java deleted file mode 100644 index 787d0ae1a..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/ParsingProcessFunction.java +++ /dev/null @@ -1,71 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.functions.ProcessFunction; -import org.apache.flink.util.Collector; -import org.apache.flink.util.OutputTag; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; - -import java.util.HashMap; -import java.util.Map; - -public class ParsingProcessFunction extends ProcessFunction { - protected ObjectMapper objectMapper = new ObjectMapper(); - private transient Map> recordOutputTags; - private DatabaseSync.TableNameConverter converter; - - public ParsingProcessFunction(DatabaseSync.TableNameConverter converter) { - this.converter = converter; - } - - @Override - public void open(Configuration parameters) throws Exception { - recordOutputTags = new HashMap<>(); - } - - @Override - public void processElement( - String record, ProcessFunction.Context context, Collector collector) - throws Exception { - String tableName = getRecordTableName(record); - String dorisName = converter.convert(tableName); - context.output(getRecordOutputTag(dorisName), record); - } - - protected String getRecordTableName(String record) throws Exception { - JsonNode recordRoot = objectMapper.readValue(record, JsonNode.class); - return extractJsonNode(recordRoot.get("source"), "table"); - } - - protected String extractJsonNode(JsonNode record, String key) { - return record != null && record.get(key) != null ? record.get(key).asText() : null; - } - - private OutputTag getRecordOutputTag(String tableName) { - return recordOutputTags.computeIfAbsent( - tableName, ParsingProcessFunction::createRecordOutputTag); - } - - public static OutputTag createRecordOutputTag(String tableName) { - return new OutputTag("record-" + tableName) {}; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DatabaseSync.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DatabaseSync.java deleted file mode 100644 index 3947c1e16..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DatabaseSync.java +++ /dev/null @@ -1,246 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc.db2; - -import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions; -import org.apache.flink.cdc.connectors.base.options.SourceOptions; -import org.apache.flink.cdc.connectors.base.options.StartupOptions; -import org.apache.flink.cdc.connectors.base.source.jdbc.JdbcIncrementalSource; -import org.apache.flink.cdc.connectors.db2.Db2Source; -import org.apache.flink.cdc.connectors.db2.source.Db2SourceBuilder; -import org.apache.flink.cdc.debezium.DebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.DebeziumSourceFunction; -import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.table.DebeziumOptions; -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ConfigOptions; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.util.Preconditions; - -import org.apache.doris.flink.catalog.doris.DataModel; -import org.apache.doris.flink.tools.cdc.DatabaseSync; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.apache.doris.flink.tools.cdc.deserialize.DorisJsonDebeziumDeserializationSchema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECTION_POOL_SIZE; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_MAX_RETRIES; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_TIMEOUT; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.SERVER_TIME_ZONE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.CHUNK_META_GROUP_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_SNAPSHOT_FETCH_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND; - -public class Db2DatabaseSync extends DatabaseSync { - public static final ConfigOption PORT = - ConfigOptions.key("port") - .intType() - .defaultValue(50000) - .withDescription("Integer port number of the DB2 database server."); - private static final Logger LOG = LoggerFactory.getLogger(Db2DatabaseSync.class); - - private static final String JDBC_URL = "jdbc:db2://%s:%d/%s"; - - public Db2DatabaseSync() throws SQLException { - super(); - } - - @Override - public void registerDriver() throws SQLException { - try { - Class.forName("com.ibm.db2.jcc.DB2Driver"); - LOG.info(" Loaded the JDBC driver"); - } catch (ClassNotFoundException ex) { - throw new SQLException( - "No suitable driver found, can not found class com.ibm.db2.jcc.DB2Driver"); - } - } - - @Override - public Connection getConnection() throws SQLException { - Properties jdbcProperties = getJdbcProperties(); - String 
jdbcUrlTemplate = getJdbcUrlTemplate(JDBC_URL, jdbcProperties); - String jdbcUrl = - String.format( - jdbcUrlTemplate, - config.get(JdbcSourceOptions.HOSTNAME), - config.get(PORT), - config.get(JdbcSourceOptions.DATABASE_NAME)); - Properties pro = new Properties(); - pro.setProperty("user", config.get(JdbcSourceOptions.USERNAME)); - pro.setProperty("password", config.get(JdbcSourceOptions.PASSWORD)); - return DriverManager.getConnection(jdbcUrl, pro); - } - - @Override - public List getSchemaList() throws Exception { - String databaseName = config.get(JdbcSourceOptions.DATABASE_NAME); - String schemaName = config.get(JdbcSourceOptions.SCHEMA_NAME); - List schemaList = new ArrayList<>(); - LOG.info("database-name {}, schema-name {}", databaseName, schemaName); - try (Connection conn = getConnection()) { - DatabaseMetaData metaData = conn.getMetaData(); - try (ResultSet tables = - metaData.getTables(null, schemaName, "%", new String[] {"TABLE"})) { - while (tables.next()) { - String tableName = tables.getString("TABLE_NAME"); - String tableComment = tables.getString("REMARKS"); - if (!isSyncNeeded(tableName)) { - continue; - } - SourceSchema sourceSchema = - new Db2Schema( - metaData, databaseName, schemaName, tableName, tableComment); - sourceSchema.setModel( - !sourceSchema.primaryKeys.isEmpty() - ? DataModel.UNIQUE - : DataModel.DUPLICATE); - schemaList.add(sourceSchema); - } - } - } - return schemaList; - } - - @Override - public DataStreamSource buildCdcSource(StreamExecutionEnvironment env) { - String databaseName = config.get(JdbcSourceOptions.DATABASE_NAME); - String schemaName = config.get(JdbcSourceOptions.SCHEMA_NAME); - Preconditions.checkNotNull(databaseName, "database-name in DB2 is required"); - Preconditions.checkNotNull(schemaName, "schema-name in DB2 is required"); - - String tableName = config.get(JdbcSourceOptions.TABLE_NAME); - String hostname = config.get(JdbcSourceOptions.HOSTNAME); - Integer port = config.get(PORT); - String username = config.get(JdbcSourceOptions.USERNAME); - String password = config.get(JdbcSourceOptions.PASSWORD); - - StartupOptions startupOptions = StartupOptions.initial(); - String startupMode = config.get(SourceOptions.SCAN_STARTUP_MODE); - if ("initial".equalsIgnoreCase(startupMode)) { - startupOptions = StartupOptions.initial(); - } else if ("latest-offset".equalsIgnoreCase(startupMode)) { - startupOptions = StartupOptions.latest(); - } - - // debezium properties set - Properties debeziumProperties = new Properties(); - debeziumProperties.putAll(Db2DateConverter.DEFAULT_PROPS); - debeziumProperties.put("decimal.handling.mode", "string"); - - for (Map.Entry entry : config.toMap().entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (key.startsWith(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX)) { - debeziumProperties.put( - key.substring(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX.length()), value); - } - } - - DebeziumDeserializationSchema schema; - if (ignoreDefaultValue) { - schema = new DorisJsonDebeziumDeserializationSchema(); - } else { - Map customConverterConfigs = new HashMap<>(); - schema = new JsonDebeziumDeserializationSchema(false, customConverterConfigs); - } - - if (config.getBoolean(SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_ENABLED, true)) { - JdbcIncrementalSource db2IncrementalSource = - Db2SourceBuilder.Db2IncrementalSource.builder() - .hostname(hostname) - .port(port) - .databaseList(databaseName) - .tableList(tableName) - .username(username) - .password(password) - .deserializer(schema) - 
.debeziumProperties(debeziumProperties) - .startupOptions(startupOptions) - .includeSchemaChanges(true) - .debeziumProperties(debeziumProperties) - .serverTimeZone(config.get(SERVER_TIME_ZONE)) - .splitSize(config.get(SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE)) - .splitMetaGroupSize(config.get(CHUNK_META_GROUP_SIZE)) - .fetchSize(config.get(SCAN_SNAPSHOT_FETCH_SIZE)) - .connectTimeout(config.get(CONNECT_TIMEOUT)) - .connectionPoolSize(config.get(CONNECTION_POOL_SIZE)) - .connectMaxRetries(config.get(CONNECT_MAX_RETRIES)) - .distributionFactorUpper( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND)) - .distributionFactorLower( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND)) - .build(); - return env.fromSource( - db2IncrementalSource, WatermarkStrategy.noWatermarks(), "Db2IncrementalSource"); - - } else { - DebeziumSourceFunction db2Source = - Db2Source.builder() - .hostname(hostname) - .port(port) - .database(databaseName) - .tableList(tableName) - .username(username) - .password(password) - .debeziumProperties(debeziumProperties) - .startupOptions(startupOptions) - .deserializer(schema) - .build(); - return env.addSource(db2Source, "Db2 Source"); - } - } - - @Override - public String getTableListPrefix() { - return config.get(JdbcSourceOptions.SCHEMA_NAME); - } - - @Override - protected String getJdbcUrlTemplate(String initialJdbcUrl, Properties jdbcProperties) { - StringBuilder jdbcUrlBuilder = new StringBuilder(initialJdbcUrl); - boolean firstParam = true; - for (Map.Entry entry : jdbcProperties.entrySet()) { - Object key = entry.getKey(); - Object value = entry.getValue(); - if (firstParam) { - jdbcUrlBuilder.append(":").append(key).append("=").append(value).append(";"); - firstParam = false; - } else { - jdbcUrlBuilder.append(key).append("=").append(value).append(";"); - } - } - return jdbcUrlBuilder.toString(); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DateConverter.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DateConverter.java deleted file mode 100644 index 9d681d8a9..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2DateConverter.java +++ /dev/null @@ -1,133 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
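A standalone sketch of the URL assembly performed by the removed Db2DatabaseSync.getJdbcUrlTemplate, which appends extra JDBC properties in DB2's ":key=value;" form; the property name below is a hypothetical example:

import java.util.Map;
import java.util.Properties;

public class Db2UrlSketch {
    static String buildUrl(String template, Properties props) {
        StringBuilder sb = new StringBuilder(template);
        boolean first = true;
        for (Map.Entry<Object, Object> entry : props.entrySet()) {
            // ':' separates the property block from the path, ';' terminates each pair.
            sb.append(first ? ":" : "").append(entry.getKey()).append("=").append(entry.getValue()).append(";");
            first = false;
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("sslConnection", "true"); // hypothetical extra property
        String template = buildUrl("jdbc:db2://%s:%d/%s", props);
        // prints: jdbc:db2://127.0.0.1:50000/testdb:sslConnection=true;
        System.out.println(String.format(template, "127.0.0.1", 50000, "testdb"));
    }
}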
- -package org.apache.doris.flink.tools.cdc.db2; - -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder; - -import io.debezium.spi.converter.CustomConverter; -import io.debezium.spi.converter.RelationalColumn; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Date; -import java.sql.Time; -import java.sql.Timestamp; -import java.time.DateTimeException; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.util.Properties; -import java.util.function.Consumer; - -public class Db2DateConverter implements CustomConverter { - private static final Logger LOGGER = LoggerFactory.getLogger(Db2DateConverter.class); - private DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_DATE; - private DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_DATE_TIME; - private final DateTimeFormatter timeFormatter = DateTimeFormatter.ISO_TIME; - - protected static final Properties DEFAULT_PROPS = new Properties(); - - static { - DEFAULT_PROPS.setProperty(DatabaseSyncConfig.CONVERTERS, DatabaseSyncConfig.DATE); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_TYPE, - "org.apache.doris.flink.tools.cdc.db2.Db2DateConverter"); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_FORMAT_DATE, DatabaseSyncConfig.YEAR_MONTH_DAY_FORMAT); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_FORMAT_TIMESTAMP, DatabaseSyncConfig.DATETIME_MICRO_FORMAT); - } - - @Override - public void configure(Properties props) { - readProps(props, "format.date", p -> dateFormatter = DateTimeFormatter.ofPattern(p)); - readProps( - props, - "format.timestamp", - p -> timestampFormatter = DateTimeFormatter.ofPattern(p)); - } - - private void readProps(Properties properties, String settingKey, Consumer callback) { - String settingValue = (String) properties.get(settingKey); - if (settingValue == null || settingValue.isEmpty()) { - return; - } - try { - callback.accept(settingValue.trim()); - } catch (IllegalArgumentException | DateTimeException e) { - LOGGER.error("setting {} is illegal:{}", settingKey, settingValue); - throw e; - } - } - - @Override - public void converterFor( - RelationalColumn column, - CustomConverter.ConverterRegistration registration) { - String sqlType = column.typeName().toUpperCase(); - SchemaBuilder schemaBuilder = null; - CustomConverter.Converter converter = null; - if (DatabaseSyncConfig.UPPERCASE_DATE.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertDate; - } - if (DatabaseSyncConfig.TIME.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertTime; - } - if (DatabaseSyncConfig.TIMESTAMP.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertTimestamp; - } - if (schemaBuilder != null) { - registration.register(schemaBuilder, converter); - } - } - - private String convertDate(Object input) { - if (input instanceof LocalDate) { - return dateFormatter.format((LocalDate) input); - } else if (input instanceof Integer) { - LocalDate date = LocalDate.ofEpochDay((Integer) input); - return dateFormatter.format(date); - } else if (input instanceof Date) { - return dateFormatter.format(((Date) input).toLocalDate()); - } - return null; - } - - private String convertTime(Object input) { - if (input instanceof Time) { - return 
timeFormatter.format(((Time) input).toLocalTime()); - } - return null; - } - - private String convertTimestamp(Object input) { - if (input instanceof Timestamp) { - return timestampFormatter.format(((Timestamp) input).toLocalDateTime()); - } else if (input instanceof Instant) { - LocalDateTime ldt = LocalDateTime.ofInstant(((Instant) input), ZoneOffset.UTC); - return timestampFormatter.format(ldt); - } - return null; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2Schema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2Schema.java deleted file mode 100644 index c36777f36..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/db2/Db2Schema.java +++ /dev/null @@ -1,54 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.db2; - -import org.apache.doris.flink.catalog.doris.FieldSchema; -import org.apache.doris.flink.tools.cdc.JdbcSourceSchema; - -import java.sql.DatabaseMetaData; -import java.sql.SQLException; -import java.util.LinkedHashMap; - -public class Db2Schema extends JdbcSourceSchema { - public Db2Schema( - DatabaseMetaData metaData, - String databaseName, - String schemaName, - String tableName, - String tableComment) - throws Exception { - super(metaData, databaseName, schemaName, tableName, tableComment); - } - - @Override - public String convertToDorisType(String fieldType, Integer precision, Integer scale) { - return Db2Type.toDorisType(fieldType, precision, scale); - } - - @Override - public String getCdcTableName() { - return schemaName + "\\." + tableName; - } - - @Override - public LinkedHashMap getColumnInfo( - DatabaseMetaData metaData, String databaseName, String schemaName, String tableName) - throws SQLException { - return super.getColumnInfo(metaData, null, schemaName, tableName); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/deserialize/DorisJsonDebeziumDeserializationSchema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/deserialize/DorisJsonDebeziumDeserializationSchema.java deleted file mode 100644 index b7e4575cb..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/deserialize/DorisJsonDebeziumDeserializationSchema.java +++ /dev/null @@ -1,197 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.deserialize; - -import org.apache.flink.api.common.typeinfo.BasicTypeInfo; -import org.apache.flink.api.common.typeinfo.TypeInformation; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.ConnectSchema; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.Field; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.Schema; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.Struct; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.source.SourceRecord; -import org.apache.flink.cdc.debezium.DebeziumDeserializationSchema; -import org.apache.flink.util.Collector; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.JsonNodeFactory; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.doris.flink.exception.DorisException; - -import java.math.BigDecimal; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.util.Collection; -import java.util.Map; - -/** Currently just use for synchronous mysql non-default. */ -public class DorisJsonDebeziumDeserializationSchema - implements DebeziumDeserializationSchema { - - private static final JsonNodeFactory JSON_NODE_FACTORY = - JsonNodeFactory.withExactBigDecimals(true); - private final ObjectMapper objectMapper; - - public DorisJsonDebeziumDeserializationSchema() { - objectMapper = new ObjectMapper(); - } - - @Override - public void deserialize(SourceRecord sourceRecord, Collector collector) - throws Exception { - Schema schema = sourceRecord.valueSchema(); - Object value = sourceRecord.value(); - JsonNode jsonValue = convertToJson(schema, value); - byte[] bytes = objectMapper.writeValueAsString(jsonValue).getBytes(StandardCharsets.UTF_8); - collector.collect(new String(bytes)); - } - - private JsonNode convertToJson(Schema schema, Object value) throws DorisException { - if (value == null) { - // Any schema is valid and we don't have a default, so treat this as an optional schema - if (schema == null) { - return null; - } - if (schema.isOptional()) { - return JSON_NODE_FACTORY.nullNode(); - } - throw new DorisException( - "Conversion error: null value for field that is required and has no default value"); - } - - try { - final Schema.Type schemaType; - if (schema == null) { - schemaType = ConnectSchema.schemaType(value.getClass()); - if (schemaType == null) { - throw new DorisException( - "Java class " - + value.getClass() - + " does not have corresponding schema type."); - } - } else { - schemaType = schema.type(); - } - switch (schemaType) { - case INT8: - return JSON_NODE_FACTORY.numberNode((Byte) value); - case INT16: - return JSON_NODE_FACTORY.numberNode((Short) value); - case INT32: - return JSON_NODE_FACTORY.numberNode((Integer) value); - case INT64: - return JSON_NODE_FACTORY.numberNode((Long) value); - case FLOAT32: - return JSON_NODE_FACTORY.numberNode((Float) value); 
- case FLOAT64: - return JSON_NODE_FACTORY.numberNode((Double) value); - case BOOLEAN: - return JSON_NODE_FACTORY.booleanNode((Boolean) value); - case STRING: - CharSequence charSeq = (CharSequence) value; - return JSON_NODE_FACTORY.textNode(charSeq.toString()); - case BYTES: - if (value instanceof byte[]) { - return JSON_NODE_FACTORY.binaryNode((byte[]) value); - } else if (value instanceof ByteBuffer) { - return JSON_NODE_FACTORY.binaryNode(((ByteBuffer) value).array()); - } else if (value instanceof BigDecimal) { - return JSON_NODE_FACTORY.numberNode((BigDecimal) value); - } else { - throw new DorisException( - "Invalid type for bytes type: " + value.getClass()); - } - case ARRAY: - { - Collection collection = (Collection) value; - ArrayNode list = JSON_NODE_FACTORY.arrayNode(); - for (Object elem : collection) { - Schema valueSchema = schema == null ? null : schema.valueSchema(); - JsonNode fieldValue = convertToJson(valueSchema, elem); - list.add(fieldValue); - } - return list; - } - case MAP: - { - Map map = (Map) value; - // If true, using string keys and JSON object; if false, using non-string - // keys and Array-encoding - boolean objectMode; - if (schema == null) { - objectMode = true; - for (Map.Entry entry : map.entrySet()) { - if (!(entry.getKey() instanceof String)) { - objectMode = false; - break; - } - } - } else { - objectMode = schema.keySchema().type() == Schema.Type.STRING; - } - ObjectNode obj = null; - ArrayNode list = null; - if (objectMode) { - obj = JSON_NODE_FACTORY.objectNode(); - } else { - list = JSON_NODE_FACTORY.arrayNode(); - } - for (Map.Entry entry : map.entrySet()) { - Schema keySchema = schema == null ? null : schema.keySchema(); - Schema valueSchema = schema == null ? null : schema.valueSchema(); - JsonNode mapKey = convertToJson(keySchema, entry.getKey()); - JsonNode mapValue = convertToJson(valueSchema, entry.getValue()); - - if (objectMode) { - obj.set(mapKey.asText(), mapValue); - } else { - list.add(JSON_NODE_FACTORY.arrayNode().add(mapKey).add(mapValue)); - } - } - return objectMode ? obj : list; - } - case STRUCT: - { - Struct struct = (Struct) value; - if (!struct.schema().equals(schema)) { - throw new DorisException("Mismatching schema."); - } - ObjectNode obj = JSON_NODE_FACTORY.objectNode(); - for (Field field : schema.fields()) { - obj.set( - field.name(), - convertToJson( - field.schema(), - struct.getWithoutDefault(field.name()))); - } - return obj; - } - } - throw new DorisException("Couldn't convert " + value + " to JSON."); - } catch (ClassCastException e) { - String schemaTypeStr = (schema != null) ? schema.type().toString() : "unknown schema"; - throw new DorisException("Invalid type for " + schemaTypeStr + ": " + value.getClass()); - } - } - - @Override - public TypeInformation getProducedType() { - return BasicTypeInfo.STRING_TYPE_INFO; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/ChangeStreamConstant.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/ChangeStreamConstant.java deleted file mode 100644 index f8772c9f7..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/ChangeStreamConstant.java +++ /dev/null @@ -1,42 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. 
The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.mongodb; - -import java.io.Serializable; - -public class ChangeStreamConstant implements Serializable { - private static final long serialVersionUID = 2599456667907755804L; - public static final String ID_FIELD = "_id"; - public static final String OID_FIELD = "$oid"; - public static final String FIELD_TYPE = "operationType"; - public static final String FIELD_DATA = "fullDocument"; - public static final String OP_UPDATE = "update"; - public static final String OP_INSERT = "insert"; - public static final String OP_REPLACE = "replace"; - public static final String OP_DELETE = "delete"; - public static final String FIELD_DATABASE = "db"; - public static final String FIELD_TABLE = "coll"; - public static final String FIELD_NAMESPACE = "ns"; - public static final String FIELD_DOCUMENT_KEY = "documentKey"; - - public static final String DATE_FIELD = "$date"; - - public static final String DECIMAL_FIELD = "$numberDecimal"; - - public static final String LONG_FIELD = "$numberLong"; -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBDatabaseSync.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBDatabaseSync.java deleted file mode 100644 index eac6acc48..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBDatabaseSync.java +++ /dev/null @@ -1,220 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
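A hedged sketch of reading the change-stream fields named by the constants above (operationType, ns.db, ns.coll) with Jackson; the event document is illustrative only, real events carry additional fields:

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class ChangeStreamFieldSketch {
    public static void main(String[] args) throws Exception {
        String event = "{"
                + "\"operationType\":\"insert\","
                + "\"ns\":{\"db\":\"test\",\"coll\":\"orders\"},"
                + "\"documentKey\":{\"_id\":{\"$oid\":\"66e0f0c2a1b2c3d4e5f60718\"}},"
                + "\"fullDocument\":{\"_id\":{\"$oid\":\"66e0f0c2a1b2c3d4e5f60718\"},\"amount\":42}"
                + "}";
        JsonNode root = new ObjectMapper().readTree(event);
        String op = root.get("operationType").asText(); // FIELD_TYPE
        JsonNode ns = root.get("ns");                    // FIELD_NAMESPACE
        String db = ns.get("db").asText();               // FIELD_DATABASE
        String coll = ns.get("coll").asText();           // FIELD_TABLE
        System.out.println(op + " on " + db + "." + coll);
    }
}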
- -package org.apache.doris.flink.tools.cdc.mongodb; - -import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.cdc.connectors.base.options.SourceOptions; -import org.apache.flink.cdc.connectors.base.options.StartupOptions; -import org.apache.flink.cdc.connectors.mongodb.source.MongoDBSource; -import org.apache.flink.cdc.connectors.mongodb.source.MongoDBSourceBuilder; -import org.apache.flink.cdc.connectors.mongodb.source.config.MongoDBSourceOptions; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverterConfig; -import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema; -import org.apache.flink.configuration.ConfigOption; -import org.apache.flink.configuration.ConfigOptions; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import com.mongodb.ConnectionString; -import com.mongodb.MongoClientSettings; -import com.mongodb.client.MongoClient; -import com.mongodb.client.MongoClients; -import com.mongodb.client.MongoCollection; -import com.mongodb.client.MongoDatabase; -import com.mongodb.client.MongoIterable; -import org.apache.commons.lang3.StringUtils; -import org.apache.doris.flink.catalog.doris.DataModel; -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.sink.writer.serializer.DorisRecordSerializer; -import org.apache.doris.flink.tools.cdc.DatabaseSync; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.apache.doris.flink.tools.cdc.ParsingProcessFunction; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.apache.doris.flink.tools.cdc.mongodb.serializer.MongoDBJsonDebeziumSchemaSerializer; -import org.bson.Document; - -import javax.annotation.Nullable; - -import java.sql.Connection; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import static org.apache.flink.cdc.connectors.mongodb.internal.MongoDBEnvelope.encodeValue; -import static org.apache.flink.util.Preconditions.checkNotNull; - -public class MongoDBDatabaseSync extends DatabaseSync { - public static final ConfigOption MONGO_CDC_CREATE_SAMPLE_PERCENT = - ConfigOptions.key("schema.sample-percent") - .doubleType() - .defaultValue(0.2) - .withDescription("mongo cdc sample percent"); - - public static final ConfigOption TABLE_NAME = - ConfigOptions.key("table-name") - .stringType() - .noDefaultValue() - .withDescription("Table name of the Mongo database to monitor."); - - public MongoDBDatabaseSync() throws SQLException {} - - @Override - public void registerDriver() throws SQLException {} - - @Override - public Connection getConnection() throws SQLException { - return null; - } - - @Override - public List getSchemaList() throws Exception { - String databaseName = config.get(MongoDBSourceOptions.DATABASE); - List schemaList = new ArrayList<>(); - MongoClientSettings.Builder settingsBuilder = MongoClientSettings.builder(); - - settingsBuilder.applyConnectionString( - new ConnectionString( - buildConnectionString( - config.get(MongoDBSourceOptions.USERNAME), - config.get(MongoDBSourceOptions.PASSWORD), - config.get(MongoDBSourceOptions.SCHEME), - config.get(MongoDBSourceOptions.HOSTS), - config.get(MongoDBSourceOptions.CONNECTION_OPTIONS)))); - - MongoClientSettings settings = settingsBuilder.build(); - Double samplePercent = 
config.get(MONGO_CDC_CREATE_SAMPLE_PERCENT); - try (MongoClient mongoClient = MongoClients.create(settings)) { - MongoDatabase mongoDatabase = mongoClient.getDatabase(databaseName); - MongoIterable collectionNames = mongoDatabase.listCollectionNames(); - for (String collectionName : collectionNames) { - if (!isSyncNeeded(collectionName)) { - continue; - } - MongoCollection collection = mongoDatabase.getCollection(collectionName); - Document firstDocument = collection.find().first(); - if (firstDocument == null) { - throw new IllegalStateException("No documents in collection to infer schema"); - } - - long totalDocuments = collection.countDocuments(); - long sampleSize = (long) Math.ceil(totalDocuments * samplePercent); - ArrayList documents = sampleData(collection, sampleSize); - MongoDBSchema mongoDBSchema = - new MongoDBSchema(documents, databaseName, collectionName, null); - mongoDBSchema.setModel(DataModel.UNIQUE); - schemaList.add(mongoDBSchema); - } - } - - return schemaList; - } - - private ArrayList sampleData(MongoCollection collection, Long sampleNum) { - ArrayList query = new ArrayList<>(); - query.add(new Document("$sample", new Document("size", sampleNum))); - // allowDiskUse to avoid mongo 'Sort exceeded memory limit' error - return collection.aggregate(query).allowDiskUse(true).into(new ArrayList<>()); - } - - private static String buildConnectionString( - @Nullable String username, - @Nullable String password, - String scheme, - String hosts, - @Nullable String connectionOptions) { - StringBuilder sb = new StringBuilder(scheme).append("://"); - if (StringUtils.isNotEmpty(username) && StringUtils.isNotEmpty(password)) { - sb.append(encodeValue(username)).append(":").append(encodeValue(password)).append("@"); - } - sb.append(checkNotNull(hosts)); - if (StringUtils.isNotEmpty(connectionOptions)) { - sb.append("/?").append(connectionOptions); - } - return sb.toString(); - } - - @Override - public DataStreamSource buildCdcSource(StreamExecutionEnvironment env) { - String hosts = config.get(MongoDBSourceOptions.HOSTS); - String username = config.get(MongoDBSourceOptions.USERNAME); - String password = config.get(MongoDBSourceOptions.PASSWORD); - String database = config.get(MongoDBSourceOptions.DATABASE); - // note: just to unify job name, no other use. 
- config.setString(DatabaseSyncConfig.DATABASE_NAME, database); - String collection = config.get(MongoDBSourceOptions.COLLECTION); - if (StringUtils.isBlank(collection)) { - collection = config.get(TABLE_NAME); - } - MongoDBSourceBuilder mongoDBSourceBuilder = MongoDBSource.builder(); - Map customConverterConfigs = new HashMap<>(); - customConverterConfigs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric"); - JsonDebeziumDeserializationSchema schema = - new JsonDebeziumDeserializationSchema(false, customConverterConfigs); - - mongoDBSourceBuilder - .hosts(hosts) - .username(username) - .password(password) - .databaseList(database) - .collectionList(collection); - - String startupMode = config.get(SourceOptions.SCAN_STARTUP_MODE); - switch (startupMode.toLowerCase()) { - case DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_INITIAL: - mongoDBSourceBuilder.startupOptions(StartupOptions.initial()); - break; - case DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_LATEST_OFFSET: - mongoDBSourceBuilder.startupOptions(StartupOptions.latest()); - break; - case DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_TIMESTAMP: - mongoDBSourceBuilder.startupOptions( - StartupOptions.timestamp( - config.get(SourceOptions.SCAN_STARTUP_TIMESTAMP_MILLIS))); - break; - default: - throw new IllegalArgumentException("Unsupported startup mode: " + startupMode); - } - MongoDBSource mongoDBSource = mongoDBSourceBuilder.deserializer(schema).build(); - return env.fromSource(mongoDBSource, WatermarkStrategy.noWatermarks(), "MongoDB Source"); - } - - @Override - public ParsingProcessFunction buildProcessFunction() { - return new MongoParsingProcessFunction(converter); - } - - @Override - public DorisRecordSerializer buildSchemaSerializer( - DorisOptions.Builder dorisBuilder, DorisExecutionOptions executionOptions) { - return MongoDBJsonDebeziumSchemaSerializer.builder() - .setDorisOptions(dorisBuilder.build()) - .setExecutionOptions(executionOptions) - .setTableMapping(tableMapping) - .setTableConf(dorisTableConfig) - .setTargetDatabase(database) - .build(); - } - - @Override - public String getTableListPrefix() { - return config.get(MongoDBSourceOptions.DATABASE); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchema.java deleted file mode 100644 index 984419bcc..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchema.java +++ /dev/null @@ -1,145 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
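A minimal sketch of the $sample aggregation that the MongoDBDatabaseSync above uses to pull a random subset of documents for schema inference; the connection string, database and collection names are hypothetical:

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import org.bson.Document;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SampleSketch {
    public static void main(String[] args) {
        try (MongoClient client = MongoClients.create("mongodb://127.0.0.1:27017")) {
            MongoCollection<Document> collection =
                    client.getDatabase("test").getCollection("orders");
            List<Document> pipeline =
                    Collections.singletonList(new Document("$sample", new Document("size", 100L)));
            // allowDiskUse avoids the 'Sort exceeded memory limit' error on large collections.
            List<Document> sampled =
                    collection.aggregate(pipeline).allowDiskUse(true).into(new ArrayList<>());
            System.out.println("sampled " + sampled.size() + " documents");
        }
    }
}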
- -package org.apache.doris.flink.tools.cdc.mongodb; - -import org.apache.flink.annotation.VisibleForTesting; -import org.apache.flink.api.java.tuple.Tuple2; - -import org.apache.doris.flink.catalog.doris.DorisType; -import org.apache.doris.flink.catalog.doris.FieldSchema; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.bson.Document; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -public class MongoDBSchema extends SourceSchema { - private static final Logger LOG = LoggerFactory.getLogger(MongoDBSchema.class); - private static final List CONVERT_TYPE = - Arrays.asList(DorisType.BIGINT, DorisType.INT, DorisType.SMALLINT, DorisType.TINYINT); - - public enum DecimalJudgement { - NOT_DECIMAL, - CERTAIN_DECIMAL, - CONVERT_TO_DECIMAL; - - public static boolean needProcessing(DecimalJudgement decimalJudgement) { - return !decimalJudgement.equals(NOT_DECIMAL); - } - } - - public MongoDBSchema( - ArrayList sampleData, - String databaseName, - String tableName, - String tableComment) - throws Exception { - super(databaseName, null, tableName, tableComment); - fields = new LinkedHashMap<>(); - for (Document data : sampleData) { - processSampleData(data); - } - - primaryKeys = new ArrayList<>(); - primaryKeys.add("_id"); - } - - @VisibleForTesting - protected void processSampleData(Document sampleData) { - for (Map.Entry entry : sampleData.entrySet()) { - String fieldName = entry.getKey(); - Object value = entry.getValue(); - String dorisType = determineDorisType(fieldName, value); - fields.put(fieldName, new FieldSchema(fieldName, dorisType, null)); - } - } - - private String determineDorisType(String fieldName, Object value) { - String dorisType = MongoDBType.toDorisType(value); - // Check if the type is string or if the existing field is a string type - FieldSchema existingField = fields.get(fieldName); - if (dorisType.equals(DorisType.STRING) - || (existingField != null - && existingField.getTypeString().equals(DorisType.STRING))) { - return DorisType.STRING; - } - // Check and process for decimal types - DecimalJudgement decimalJudgement = judgeDecimalField(fieldName, dorisType); - if (DecimalJudgement.needProcessing(decimalJudgement)) { - if (decimalJudgement == DecimalJudgement.CONVERT_TO_DECIMAL) { - int precision = value.toString().length(); - dorisType = MongoDBType.formatDecimalType(precision, 0); - } - dorisType = replaceDecimalTypeIfNeeded(fieldName, dorisType); - } - return dorisType; - } - - private DecimalJudgement judgeDecimalField(String fieldName, String dorisType) { - FieldSchema existingField = fields.get(fieldName); - if (existingField == null) { - return DecimalJudgement.NOT_DECIMAL; - } - boolean existDecimal = existingField.getTypeString().startsWith(DorisType.DECIMAL); - boolean isDecimal = dorisType.startsWith(DorisType.DECIMAL); - if (existDecimal && isDecimal) { - return DecimalJudgement.CERTAIN_DECIMAL; - } else if (CONVERT_TYPE.contains(dorisType)) { - return DecimalJudgement.CONVERT_TO_DECIMAL; - } - return DecimalJudgement.NOT_DECIMAL; - } - - @VisibleForTesting - protected String replaceDecimalTypeIfNeeded(String fieldName, String newDorisType) { - FieldSchema existingField = fields.get(fieldName); - if (existingField.getTypeString().startsWith(DorisType.DECIMAL)) { - Tuple2 existingPrecisionAndScale = - MongoDBType.getDecimalPrecisionAndScale(existingField.getTypeString()); - int existingPrecision = 
existingPrecisionAndScale.f0; - int existingScale = existingPrecisionAndScale.f1; - Tuple2 currentPrecisionAndScale = - MongoDBType.getDecimalPrecisionAndScale(newDorisType); - int currentPrecision = currentPrecisionAndScale.f0; - int currentScale = currentPrecisionAndScale.f1; - - int newScale = Math.max(existingScale, currentScale); - int newIntegerPartSize = - Math.max(existingPrecision - existingScale, currentPrecision - currentScale); - int newPrecision = newIntegerPartSize + newScale; - - return DorisType.DECIMAL + "(" + newPrecision + "," + newScale + ")"; - } - return newDorisType; - } - - @Override - public String convertToDorisType(String fieldType, Integer precision, Integer scale) { - return null; - } - - @Override - public String getCdcTableName() { - return databaseName + "\\." + tableName; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBType.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBType.java deleted file mode 100644 index 578a407cd..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBType.java +++ /dev/null @@ -1,128 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
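When the MongoDBSchema above sees the same field with two different DECIMAL types, it widens to a type covering both: the merged scale is the larger scale, and the merged precision is that scale plus the larger integer-part width. A small self-contained sketch of the arithmetic; the example values are illustrative:

public class DecimalMergeSketch {
    /** Merge DECIMAL(p1,s1) and DECIMAL(p2,s2) into a type wide enough for both. */
    static String merge(int p1, int s1, int p2, int s2) {
        int scale = Math.max(s1, s2);
        int integerPart = Math.max(p1 - s1, p2 - s2);
        return "DECIMAL(" + (integerPart + scale) + "," + scale + ")";
    }

    public static void main(String[] args) {
        // DECIMAL(5,2) merged with DECIMAL(7,4): integer part max(3,3)=3, scale max(2,4)=4 -> DECIMAL(7,4)
        System.out.println(merge(5, 2, 7, 4));
        // DECIMAL(10,0) merged with DECIMAL(6,4): integer part 10, scale 4 -> DECIMAL(14,4)
        System.out.println(merge(10, 0, 6, 4));
    }
}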
- -package org.apache.doris.flink.tools.cdc.mongodb; - -import org.apache.flink.api.java.tuple.Tuple2; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.ArrayNode; -import com.fasterxml.jackson.databind.node.BooleanNode; -import com.fasterxml.jackson.databind.node.DecimalNode; -import com.fasterxml.jackson.databind.node.DoubleNode; -import com.fasterxml.jackson.databind.node.IntNode; -import com.fasterxml.jackson.databind.node.LongNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import com.fasterxml.jackson.databind.node.TextNode; -import org.apache.doris.flink.catalog.doris.DorisType; -import org.apache.doris.flink.exception.DorisRuntimeException; -import org.bson.BsonArray; -import org.bson.types.Decimal128; -import org.bson.types.ObjectId; - -import java.math.BigDecimal; -import java.util.Date; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class MongoDBType { - - public static final String DATE_TYPE = "$date"; - public static final String DECIMAL_TYPE = "$numberDecimal"; - public static final String LONG_TYPE = "$numberLong"; - - public static String toDorisType(Object value) { - if (value instanceof Integer) { - return DorisType.INT; - } else if (value instanceof Date) { - return DorisType.DATETIME_V2 + "(3)"; - } else if (value instanceof Long) { - return DorisType.BIGINT; - } else if (value instanceof Double) { - return checkAndRebuildBigDecimal(new BigDecimal(String.valueOf(value))); - } else if (value instanceof Boolean) { - return DorisType.BOOLEAN; - } else if (value instanceof String) { - return DorisType.STRING; - } else if (value instanceof ObjectId) { - return DorisType.VARCHAR + "(30)"; - } else if (value instanceof BsonArray) { - return DorisType.ARRAY; - } else if (value instanceof Decimal128) { - return checkAndRebuildBigDecimal(((Decimal128) value).bigDecimalValue()); - } else { - return DorisType.STRING; - } - } - - public static String jsonNodeToDorisType(JsonNode value) { - if (value instanceof IntNode) { - return DorisType.INT; - } else if (value instanceof TextNode) { - return DorisType.STRING; - } else if (value instanceof LongNode) { - return DorisType.BIGINT; - } else if (value instanceof DoubleNode) { - return DorisType.DOUBLE; - } else if (value instanceof BooleanNode) { - return DorisType.BOOLEAN; - } else if (value instanceof ArrayNode) { - return DorisType.ARRAY; - } else if (value instanceof DecimalNode) { - return checkAndRebuildBigDecimal(value.decimalValue()); - } else if (value instanceof ObjectNode) { - if (value.size() == 1 && value.get(DATE_TYPE) != null) { - return DorisType.DATETIME_V2 + "(3)"; - } else if (value.size() == 1 && value.get(DECIMAL_TYPE) != null) { - return checkAndRebuildBigDecimal(new BigDecimal(value.get(DECIMAL_TYPE).asText())); - } else if (value.size() == 1 && value.get(LONG_TYPE) != null) { - return DorisType.BIGINT; - } else { - return DorisType.STRING; - } - } else { - return DorisType.STRING; - } - } - - public static Tuple2 getDecimalPrecisionAndScale(String decimalString) { - // Simplified regular expression to match two numbers in brackets - String regex = "\\((\\d+),(\\d+)\\)"; - Pattern pattern = Pattern.compile(regex); - Matcher matcher = pattern.matcher(decimalString); - - if (matcher.find()) { - Integer precision = Integer.parseInt(matcher.group(1)); - Integer scale = Integer.parseInt(matcher.group(2)); - return new Tuple2<>(precision, scale); - } - throw new DorisRuntimeException("Get Decimal precision and Scale error !"); - } - 
- public static String checkAndRebuildBigDecimal(BigDecimal decimal) { - if (decimal.scale() < 0) { - decimal = new BigDecimal(decimal.toPlainString()); - } - return decimal.precision() <= 38 - ? formatDecimalType(decimal.precision(), Math.max(decimal.scale(), 0)) - : DorisType.STRING; - } - - public static String formatDecimalType(int precision, int scale) { - return String.format("%s(%s,%s)", DorisType.DECIMAL_V3, precision, scale); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverter.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverter.java deleted file mode 100644 index 614fa092a..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverter.java +++ /dev/null @@ -1,38 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.mongodb; - -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; - -import java.time.Instant; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; - -public class MongoDateConverter { - private static final ThreadLocal dateFormatterThreadLocal = - ThreadLocal.withInitial( - () -> DateTimeFormatter.ofPattern(DatabaseSyncConfig.DATETIME_MICRO_FORMAT)); - - public static String convertTimestampToString(long timestamp) { - Instant instant = Instant.ofEpochMilli(timestamp); - LocalDateTime localDateTime = - LocalDateTime.ofInstant(instant, ZoneId.of(DatabaseSyncConfig.TIME_ZONE_SHANGHAI)); - return dateFormatterThreadLocal.get().format(localDateTime); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunction.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunction.java deleted file mode 100644 index 737617a01..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunction.java +++ /dev/null @@ -1,44 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.mongodb; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.node.NullNode; -import org.apache.doris.flink.tools.cdc.DatabaseSync.TableNameConverter; -import org.apache.doris.flink.tools.cdc.ParsingProcessFunction; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class MongoParsingProcessFunction extends ParsingProcessFunction { - private static final Logger LOG = LoggerFactory.getLogger(MongoParsingProcessFunction.class); - - public MongoParsingProcessFunction(TableNameConverter converter) { - super(converter); - } - - @Override - protected String getRecordTableName(String record) throws Exception { - JsonNode jsonNode = objectMapper.readValue(record, JsonNode.class); - if (jsonNode.get("ns") == null || jsonNode.get("ns") instanceof NullNode) { - LOG.error("Failed to get cdc namespace"); - throw new RuntimeException(); - } - JsonNode nameSpace = jsonNode.get("ns"); - return extractJsonNode(nameSpace, "coll"); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoDBJsonDebeziumSchemaSerializer.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoDBJsonDebeziumSchemaSerializer.java deleted file mode 100644 index d4a87ff87..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoDBJsonDebeziumSchemaSerializer.java +++ /dev/null @@ -1,211 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc.mongodb.serializer; - -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.JsonNodeFactory; -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.sink.writer.serializer.DorisRecord; -import org.apache.doris.flink.sink.writer.serializer.DorisRecordSerializer; -import org.apache.doris.flink.sink.writer.serializer.jsondebezium.CdcDataChange; -import org.apache.doris.flink.sink.writer.serializer.jsondebezium.CdcSchemaChange; -import org.apache.doris.flink.sink.writer.serializer.jsondebezium.JsonDebeziumChangeContext; -import org.apache.doris.flink.tools.cdc.DorisTableConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.Map; -import java.util.regex.Pattern; - -import static org.apache.doris.flink.sink.writer.LoadConstants.LINE_DELIMITER_DEFAULT; -import static org.apache.doris.flink.sink.writer.LoadConstants.LINE_DELIMITER_KEY; - -public class MongoDBJsonDebeziumSchemaSerializer implements DorisRecordSerializer { - - private static final Logger LOG = - LoggerFactory.getLogger(MongoDBJsonDebeziumSchemaSerializer.class); - private final Pattern pattern; - private final DorisOptions dorisOptions; - private final ObjectMapper objectMapper = new ObjectMapper(); - // table name of the cdc upstream, format is db.tbl - private final String sourceTableName; - private String lineDelimiter = LINE_DELIMITER_DEFAULT; - private boolean ignoreUpdateBefore = true; - private boolean enableDelete = true; - // - private Map tableMapping; - // create table properties - private DorisTableConfig dorisTableConfig; - private String targetDatabase; - - private CdcDataChange dataChange; - private CdcSchemaChange schemaChange; - - private String targetTablePrefix; - private String targetTableSuffix; - - public MongoDBJsonDebeziumSchemaSerializer( - DorisOptions dorisOptions, - Pattern pattern, - String sourceTableName, - DorisExecutionOptions executionOptions, - Map tableMapping, - DorisTableConfig dorisTableConfig, - String targetDatabase, - String targetTablePrefix, - String targetTableSuffix) { - this.dorisOptions = dorisOptions; - this.pattern = pattern; - this.sourceTableName = sourceTableName; - // Prevent loss of decimal data precision - this.objectMapper.enable(DeserializationFeature.USE_BIG_DECIMAL_FOR_FLOATS); - JsonNodeFactory jsonNodeFactory = JsonNodeFactory.withExactBigDecimals(true); - this.objectMapper.setNodeFactory(jsonNodeFactory); - this.tableMapping = tableMapping; - this.dorisTableConfig = dorisTableConfig; - this.targetDatabase = targetDatabase; - this.targetTablePrefix = targetTablePrefix; - this.targetTableSuffix = targetTableSuffix; - if (executionOptions != null) { - this.lineDelimiter = - executionOptions - .getStreamLoadProp() - .getProperty(LINE_DELIMITER_KEY, LINE_DELIMITER_DEFAULT); - this.ignoreUpdateBefore = executionOptions.getIgnoreUpdateBefore(); - this.enableDelete = executionOptions.getDeletable(); - } - init(); - } - - private void init() { - JsonDebeziumChangeContext changeContext = - new JsonDebeziumChangeContext( - dorisOptions, - tableMapping, - sourceTableName, - targetDatabase, - dorisTableConfig, - objectMapper, - pattern, - lineDelimiter, - ignoreUpdateBefore, - targetTablePrefix, - targetTableSuffix, - enableDelete); - 
this.dataChange = new MongoJsonDebeziumDataChange(changeContext); - this.schemaChange = new MongoJsonDebeziumSchemaChange(changeContext); - } - - @Override - public DorisRecord serialize(String record) throws IOException { - LOG.debug("received debezium json data {} :", record); - JsonNode recordRoot = objectMapper.readValue(record, JsonNode.class); - String op = getOperateType(recordRoot); - try { - schemaChange.schemaChange(recordRoot); - } catch (Exception e) { - throw new RuntimeException(e); - } - return dataChange.serialize(record, recordRoot, op); - } - - private String getOperateType(JsonNode recordRoot) { - return recordRoot.get("operationType").asText(); - } - - public static MongoDBJsonDebeziumSchemaSerializer.Builder builder() { - return new MongoDBJsonDebeziumSchemaSerializer.Builder(); - } - - public static class Builder { - private DorisOptions dorisOptions; - private Pattern addDropDDLPattern; - private String sourceTableName; - private DorisExecutionOptions executionOptions; - private Map tableMapping; - private DorisTableConfig dorisTableConfig; - private String targetDatabase; - private String targetTablePrefix = ""; - private String targetTableSuffix = ""; - - public MongoDBJsonDebeziumSchemaSerializer.Builder setDorisOptions( - DorisOptions dorisOptions) { - this.dorisOptions = dorisOptions; - return this; - } - - public MongoDBJsonDebeziumSchemaSerializer.Builder setPattern(Pattern addDropDDLPattern) { - this.addDropDDLPattern = addDropDDLPattern; - return this; - } - - public MongoDBJsonDebeziumSchemaSerializer.Builder setSourceTableName( - String sourceTableName) { - this.sourceTableName = sourceTableName; - return this; - } - - public MongoDBJsonDebeziumSchemaSerializer.Builder setExecutionOptions( - DorisExecutionOptions executionOptions) { - this.executionOptions = executionOptions; - return this; - } - - public MongoDBJsonDebeziumSchemaSerializer.Builder setTableMapping( - Map tableMapping) { - this.tableMapping = tableMapping; - return this; - } - - @Deprecated - public MongoDBJsonDebeziumSchemaSerializer.Builder setTableProperties( - Map tableProperties) { - this.dorisTableConfig = new DorisTableConfig(tableProperties); - return this; - } - - public MongoDBJsonDebeziumSchemaSerializer.Builder setTableConf( - DorisTableConfig dorisTableConfig) { - this.dorisTableConfig = dorisTableConfig; - return this; - } - - public MongoDBJsonDebeziumSchemaSerializer.Builder setTargetDatabase( - String targetDatabase) { - this.targetDatabase = targetDatabase; - return this; - } - - public MongoDBJsonDebeziumSchemaSerializer build() { - return new MongoDBJsonDebeziumSchemaSerializer( - dorisOptions, - addDropDDLPattern, - sourceTableName, - executionOptions, - tableMapping, - dorisTableConfig, - targetDatabase, - targetTablePrefix, - targetTableSuffix); - } - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumDataChange.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumDataChange.java deleted file mode 100644 index 9dbe7ffe1..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumDataChange.java +++ /dev/null @@ -1,163 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. 
The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.mongodb.serializer; - -import org.apache.flink.util.StringUtils; - -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.NullNode; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.sink.writer.ChangeEvent; -import org.apache.doris.flink.sink.writer.serializer.DorisRecord; -import org.apache.doris.flink.sink.writer.serializer.jsondebezium.CdcDataChange; -import org.apache.doris.flink.sink.writer.serializer.jsondebezium.JsonDebeziumChangeContext; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; -import java.util.Map; - -import static org.apache.doris.flink.sink.util.DeleteOperation.addDeleteSign; -import static org.apache.doris.flink.sink.writer.serializer.jsondebezium.JsonDebeziumChangeUtils.extractJsonNode; -import static org.apache.doris.flink.sink.writer.serializer.jsondebezium.JsonDebeziumChangeUtils.getDorisTableIdentifier; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_DATA; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_DATABASE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_DOCUMENT_KEY; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_NAMESPACE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_TABLE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.ID_FIELD; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.OID_FIELD; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.OP_DELETE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.OP_INSERT; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.OP_REPLACE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.OP_UPDATE; - -public class MongoJsonDebeziumDataChange extends CdcDataChange implements ChangeEvent { - private static final Logger LOG = LoggerFactory.getLogger(MongoJsonDebeziumDataChange.class); - - public DorisOptions dorisOptions; - public String lineDelimiter; - public JsonDebeziumChangeContext changeContext; - public ObjectMapper objectMapper; - public Map tableMapping; - private final boolean enableDelete; - - public MongoJsonDebeziumDataChange(JsonDebeziumChangeContext changeContext) { - this.changeContext = changeContext; - this.dorisOptions = changeContext.getDorisOptions(); - this.objectMapper = changeContext.getObjectMapper(); - this.lineDelimiter = 
changeContext.getLineDelimiter(); - this.tableMapping = changeContext.getTableMapping(); - this.enableDelete = changeContext.enableDelete(); - } - - @Override - public DorisRecord serialize(String record, JsonNode recordRoot, String op) throws IOException { - // Filter out table records that are not in tableMapping - String cdcTableIdentifier = getCdcTableIdentifier(recordRoot); - String dorisTableIdentifier = - getDorisTableIdentifier(cdcTableIdentifier, dorisOptions, tableMapping); - if (StringUtils.isNullOrWhitespaceOnly(dorisTableIdentifier)) { - LOG.warn( - "filter table {}, because it is not listened, record detail is {}", - cdcTableIdentifier, - record); - return null; - } - Map valueMap; - switch (op) { - case OP_INSERT: - case OP_UPDATE: - case OP_REPLACE: - valueMap = extractAfterRow(recordRoot); - addDeleteSign(valueMap, false); - break; - case OP_DELETE: - valueMap = extractDeleteRow(recordRoot); - addDeleteSign(valueMap, enableDelete); - break; - default: - LOG.error("parse record fail, unknown op {} in {}", op, record); - return null; - } - - return DorisRecord.of( - dorisTableIdentifier, - objectMapper.writeValueAsString(valueMap).getBytes(StandardCharsets.UTF_8)); - } - - public String getCdcTableIdentifier(JsonNode record) { - if (record.get(FIELD_NAMESPACE) == null - || record.get(FIELD_NAMESPACE) instanceof NullNode) { - LOG.error("Failed to get cdc namespace"); - throw new RuntimeException(); - } - JsonNode nameSpace = record.get(FIELD_NAMESPACE); - String db = extractJsonNode(nameSpace, FIELD_DATABASE); - String table = extractJsonNode(nameSpace, FIELD_TABLE); - return SourceSchema.getString(db, null, table); - } - - @Override - public Map extractBeforeRow(JsonNode record) { - return null; - } - - @Override - public Map extractAfterRow(JsonNode recordRoot) { - JsonNode dataNode = recordRoot.get(FIELD_DATA); - Map rowMap = extractRow(dataNode); - String objectId; - // if user specifies the `_id` field manually, the $oid field may not exist - if (rowMap.get(ID_FIELD) instanceof Map) { - objectId = ((Map) rowMap.get(ID_FIELD)).get(OID_FIELD).toString(); - } else { - objectId = rowMap.get(ID_FIELD).toString(); - } - rowMap.put(ID_FIELD, objectId); - return rowMap; - } - - private Map extractDeleteRow(JsonNode recordRoot) - throws JsonProcessingException { - String documentKey = extractJsonNode(recordRoot, FIELD_DOCUMENT_KEY); - JsonNode jsonNode = objectMapper.readTree(documentKey); - String objectId; - // if user specifies the `_id` field manually, the $oid field may not exist - if (jsonNode.get(ID_FIELD).has(OID_FIELD)) { - objectId = extractJsonNode(jsonNode.get(ID_FIELD), OID_FIELD); - } else { - objectId = jsonNode.get(ID_FIELD).asText(); - } - Map row = new HashMap<>(); - row.put(ID_FIELD, objectId); - return row; - } - - private Map extractRow(JsonNode recordRow) { - Map recordMap = - objectMapper.convertValue(recordRow, new TypeReference>() {}); - return recordMap != null ? 
recordMap : new HashMap<>(); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumSchemaChange.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumSchemaChange.java deleted file mode 100644 index 01eebd45e..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mongodb/serializer/MongoJsonDebeziumSchemaChange.java +++ /dev/null @@ -1,198 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.mongodb.serializer; - -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.node.NullNode; -import com.fasterxml.jackson.databind.node.ObjectNode; -import org.apache.doris.flink.catalog.doris.DorisSystem; -import org.apache.doris.flink.catalog.doris.FieldSchema; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.exception.DorisRuntimeException; -import org.apache.doris.flink.exception.IllegalArgumentException; -import org.apache.doris.flink.sink.schema.SchemaChangeManager; -import org.apache.doris.flink.sink.writer.serializer.jsondebezium.CdcSchemaChange; -import org.apache.doris.flink.sink.writer.serializer.jsondebezium.JsonDebeziumChangeContext; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.apache.doris.flink.tools.cdc.mongodb.MongoDBType; -import org.apache.doris.flink.tools.cdc.mongodb.MongoDateConverter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -import static org.apache.doris.flink.sink.writer.serializer.jsondebezium.JsonDebeziumChangeUtils.getDorisTableIdentifier; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.DATE_FIELD; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.DECIMAL_FIELD; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_DATA; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_DATABASE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_NAMESPACE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.FIELD_TABLE; -import static org.apache.doris.flink.tools.cdc.mongodb.ChangeStreamConstant.LONG_FIELD; - -public class MongoJsonDebeziumSchemaChange extends CdcSchemaChange { - - private static final Logger LOG = LoggerFactory.getLogger(MongoJsonDebeziumSchemaChange.class); - - private final ObjectMapper objectMapper; - - private final Map> tableFields; - 
- private final SchemaChangeManager schemaChangeManager; - - private final DorisSystem dorisSystem; - - public Map tableMapping; - private final DorisOptions dorisOptions; - - private final Set specialFields = - new HashSet<>(Arrays.asList(DATE_FIELD, DECIMAL_FIELD, LONG_FIELD)); - - public MongoJsonDebeziumSchemaChange(JsonDebeziumChangeContext changeContext) { - this.objectMapper = changeContext.getObjectMapper(); - this.dorisOptions = changeContext.getDorisOptions(); - this.tableFields = new HashMap<>(); - this.schemaChangeManager = new SchemaChangeManager(dorisOptions); - this.dorisSystem = new DorisSystem(dorisOptions); - this.tableMapping = changeContext.getTableMapping(); - } - - @Override - public String extractDatabase(JsonNode record) { - return null; - } - - @Override - public String extractTable(JsonNode record) { - return null; - } - - @Override - public boolean schemaChange(JsonNode recordRoot) throws IOException { - JsonNode logData = getFullDocument(recordRoot); - String cdcTableIdentifier = getCdcTableIdentifier(recordRoot); - String dorisTableIdentifier = - getDorisTableIdentifier(cdcTableIdentifier, dorisOptions, tableMapping); - String[] tableInfo = dorisTableIdentifier.split("\\."); - if (tableInfo.length != 2) { - throw new DorisRuntimeException(); - } - String dataBase = tableInfo[0]; - String table = tableInfo[1]; - // build table fields mapping for all record - buildDorisTableFieldsMapping(dataBase, table); - - // Determine whether change stream log and tableField are exactly the same, if not, perform - // schema change - checkAndUpdateSchemaChange(logData, dorisTableIdentifier, dataBase, table); - formatSpecialFieldData(logData); - ((ObjectNode) recordRoot).set(FIELD_DATA, logData); - return true; - } - - private void formatSpecialFieldData(JsonNode logData) { - logData.fieldNames() - .forEachRemaining( - fieldName -> { - JsonNode fieldNode = logData.get(fieldName); - if (fieldNode.isObject() && fieldNode.size() == 1) { - String fieldKey = fieldNode.fieldNames().next(); - if (specialFields.contains(fieldKey)) { - switch (fieldKey) { - case DATE_FIELD: - long timestamp = fieldNode.get(DATE_FIELD).asLong(); - String formattedDate = - MongoDateConverter.convertTimestampToString( - timestamp); - ((ObjectNode) logData).put(fieldName, formattedDate); - break; - case DECIMAL_FIELD: - String numberDecimal = - fieldNode.get(DECIMAL_FIELD).asText(); - ((ObjectNode) logData).put(fieldName, numberDecimal); - break; - - case LONG_FIELD: - long longFiled = fieldNode.get(LONG_FIELD).asLong(); - ((ObjectNode) logData).put(fieldName, longFiled); - break; - } - } - } - }); - } - - private JsonNode getFullDocument(JsonNode recordRoot) { - try { - return objectMapper.readTree(recordRoot.get(FIELD_DATA).asText()); - } catch (IOException e) { - throw new DorisRuntimeException("Failed to parse fullDocument JSON", e); - } - } - - private void checkAndUpdateSchemaChange( - JsonNode logData, String dorisTableIdentifier, String database, String table) { - Map tableFieldMap = tableFields.get(dorisTableIdentifier); - logData.fieldNames() - .forEachRemaining( - name -> { - try { - if (!tableFieldMap.containsKey(name)) { - doSchemaChange(name, logData, database, table); - } - } catch (Exception e) { - throw new RuntimeException("Error during schema change", e); - } - }); - } - - private void doSchemaChange( - String logFieldName, JsonNode logData, String database, String table) - throws IOException, IllegalArgumentException { - String dorisType = 
MongoDBType.jsonNodeToDorisType(logData.get(logFieldName)); - schemaChangeManager.addColumn( - database, table, new FieldSchema(logFieldName, dorisType, null)); - String identifier = database + "." + table; - tableFields.computeIfAbsent(identifier, k -> new HashMap<>()).put(logFieldName, dorisType); - } - - private void buildDorisTableFieldsMapping(String databaseName, String tableName) { - String identifier = databaseName + "." + tableName; - tableFields.computeIfAbsent( - identifier, k -> dorisSystem.getTableFieldNames(databaseName, tableName)); - } - - @Override - public String getCdcTableIdentifier(JsonNode record) { - if (record.get(FIELD_NAMESPACE) == null - || record.get(FIELD_NAMESPACE) instanceof NullNode) { - LOG.error("Failed to get cdc namespace"); - throw new RuntimeException(); - } - JsonNode nameSpace = record.get(FIELD_NAMESPACE); - String table = nameSpace.get(FIELD_TABLE).asText(); - String db = nameSpace.get(FIELD_DATABASE).asText(); - return SourceSchema.getString(db, null, table); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/DateToStringConverter.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/DateToStringConverter.java deleted file mode 100644 index 0c614516c..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/DateToStringConverter.java +++ /dev/null @@ -1,173 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
-
-package org.apache.doris.flink.tools.cdc.mysql;
-
-import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
-
-import io.debezium.spi.converter.CustomConverter;
-import io.debezium.spi.converter.RelationalColumn;
-import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.Timestamp;
-import java.time.DateTimeException;
-import java.time.Duration;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.LocalTime;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.Properties;
-import java.util.function.Consumer;
-
-public class DateToStringConverter implements CustomConverter<SchemaBuilder, RelationalColumn> {
-    private static final Logger log = LoggerFactory.getLogger(DateToStringConverter.class);
-    private DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_DATE;
-    private DateTimeFormatter timeFormatter = DateTimeFormatter.ISO_TIME;
-    private DateTimeFormatter datetimeFormatter = DateTimeFormatter.ISO_DATE_TIME;
-    private DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_DATE_TIME;
-    private ZoneId timestampZoneId = ZoneId.systemDefault();
-
-    public static final Properties DEFAULT_PROPS = new Properties();
-
-    static {
-        DEFAULT_PROPS.setProperty(DatabaseSyncConfig.CONVERTERS, DatabaseSyncConfig.DATE);
-        DEFAULT_PROPS.setProperty(
-                DatabaseSyncConfig.DATE_TYPE,
-                "org.apache.doris.flink.tools.cdc.mysql.DateToStringConverter");
-        DEFAULT_PROPS.setProperty(
-                DatabaseSyncConfig.DATE_FORMAT_DATE, DatabaseSyncConfig.YEAR_MONTH_DAY_FORMAT);
-        DEFAULT_PROPS.setProperty(
-                DatabaseSyncConfig.DATE_FORMAT_DATETIME, DatabaseSyncConfig.DATETIME_MICRO_FORMAT);
-        DEFAULT_PROPS.setProperty(
-                DatabaseSyncConfig.DATE_FORMAT_TIMESTAMP, DatabaseSyncConfig.DATETIME_MICRO_FORMAT);
-        DEFAULT_PROPS.setProperty(
-                DatabaseSyncConfig.DATE_FORMAT_TIMESTAMP_ZONE, DatabaseSyncConfig.TIME_ZONE_UTC_8);
-    }
-
-    @Override
-    public void configure(Properties props) {
-        readProps(
-                props,
-                DatabaseSyncConfig.FORMAT_DATE,
-                p -> dateFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(
-                props,
-                DatabaseSyncConfig.FORMAT_TIME,
-                p -> timeFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(
-                props,
-                DatabaseSyncConfig.FORMAT_DATETIME,
-                p -> datetimeFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(
-                props,
-                DatabaseSyncConfig.FORMAT_TIMESTAMP,
-                p -> timestampFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(
-                props,
-                DatabaseSyncConfig.FORMAT_TIMESTAMP_ZONE,
-                z -> timestampZoneId = ZoneId.of(z));
-    }
-
-    private void readProps(Properties properties, String settingKey, Consumer<String> callback) {
-        String settingValue = (String) properties.get(settingKey);
-        if (settingValue == null || settingValue.length() == 0) {
-            return;
-        }
-        try {
-            callback.accept(settingValue.trim());
-        } catch (IllegalArgumentException | DateTimeException e) {
-            log.error("setting {} is illegal:{}", settingKey, settingValue);
-            throw e;
-        }
-    }
-
-    @Override
-    public void converterFor(
-            RelationalColumn column, ConverterRegistration<SchemaBuilder> registration) {
-        String sqlType = column.typeName().toUpperCase();
-        SchemaBuilder schemaBuilder = null;
-        Converter converter = null;
-        if (DatabaseSyncConfig.UPPERCASE_DATE.equals(sqlType)) {
-            schemaBuilder = SchemaBuilder.string().optional();
-            converter = this::convertDate;
-        }
-        if (DatabaseSyncConfig.TIME.equals(sqlType)) {
-            schemaBuilder = SchemaBuilder.string().optional();
-            converter =
this::convertTime; - } - if (DatabaseSyncConfig.DATETIME.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertDateTime; - } - if (DatabaseSyncConfig.TIMESTAMP.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertTimestamp; - } - if (schemaBuilder != null) { - registration.register(schemaBuilder, converter); - } - } - - private String convertDate(Object input) { - if (input instanceof LocalDate) { - return dateFormatter.format((LocalDate) input); - } else if (input instanceof Integer) { - LocalDate date = LocalDate.ofEpochDay((Integer) input); - return dateFormatter.format(date); - } - return null; - } - - private String convertTime(Object input) { - if (input instanceof Duration) { - Duration duration = (Duration) input; - long seconds = duration.getSeconds(); - int nano = duration.getNano(); - LocalTime time = LocalTime.ofSecondOfDay(seconds).withNano(nano); - return timeFormatter.format(time); - } - return null; - } - - private String convertDateTime(Object input) { - if (input instanceof LocalDateTime) { - return datetimeFormatter.format((LocalDateTime) input); - } else if (input instanceof Timestamp) { - return datetimeFormatter.format(((Timestamp) input).toLocalDateTime()); - } - return null; - } - - private String convertTimestamp(Object input) { - if (input instanceof ZonedDateTime) { - // mysql timestamp will be converted to UTC storage, - // and the zonedDatetime here is UTC time - ZonedDateTime zonedDateTime = (ZonedDateTime) input; - LocalDateTime localDateTime = - zonedDateTime.withZoneSameInstant(timestampZoneId).toLocalDateTime(); - return timestampFormatter.format(localDateTime); - } else if (input instanceof Timestamp) { - return timestampFormatter.format( - ((Timestamp) input).toInstant().atZone(timestampZoneId).toLocalDateTime()); - } - return null; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlDatabaseSync.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlDatabaseSync.java deleted file mode 100644 index 9fdfdcaed..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlDatabaseSync.java +++ /dev/null @@ -1,271 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc.mysql; - -import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.cdc.connectors.mysql.source.MySqlSource; -import org.apache.flink.cdc.connectors.mysql.source.MySqlSourceBuilder; -import org.apache.flink.cdc.connectors.mysql.source.config.MySqlSourceOptions; -import org.apache.flink.cdc.connectors.mysql.source.offset.BinlogOffset; -import org.apache.flink.cdc.connectors.mysql.source.offset.BinlogOffsetBuilder; -import org.apache.flink.cdc.connectors.mysql.table.StartupOptions; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverterConfig; -import org.apache.flink.cdc.debezium.DebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.table.DebeziumOptions; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.catalog.ObjectPath; -import org.apache.flink.util.Preconditions; -import org.apache.flink.util.StringUtils; - -import org.apache.doris.flink.catalog.doris.DataModel; -import org.apache.doris.flink.tools.cdc.DatabaseSync; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.apache.doris.flink.tools.cdc.deserialize.DorisJsonDebeziumDeserializationSchema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -import static org.apache.flink.cdc.debezium.utils.JdbcUrlUtils.PROPERTIES_PREFIX; - -public class MysqlDatabaseSync extends DatabaseSync { - private static final Logger LOG = LoggerFactory.getLogger(MysqlDatabaseSync.class); - private static final String JDBC_URL = "jdbc:mysql://%s:%d?useInformationSchema=true"; - - public MysqlDatabaseSync() throws SQLException { - super(); - } - - @Override - public void registerDriver() throws SQLException { - try { - Class.forName("com.mysql.cj.jdbc.Driver"); - } catch (ClassNotFoundException ex) { - LOG.warn( - "can not found class com.mysql.cj.jdbc.Driver, use class com.mysql.jdbc.Driver"); - try { - Class.forName("com.mysql.jdbc.Driver"); - } catch (Exception e) { - throw new SQLException( - "No suitable driver found, can not found class com.mysql.cj.jdbc.Driver and com.mysql.jdbc.Driver"); - } - } - } - - @Override - public Connection getConnection() throws SQLException { - Properties jdbcProperties = getJdbcProperties(); - String jdbcUrlTemplate = getJdbcUrlTemplate(JDBC_URL, jdbcProperties); - String jdbcUrl = - String.format( - jdbcUrlTemplate, - config.get(MySqlSourceOptions.HOSTNAME), - config.get(MySqlSourceOptions.PORT)); - - return DriverManager.getConnection( - jdbcUrl, - config.get(MySqlSourceOptions.USERNAME), - config.get(MySqlSourceOptions.PASSWORD)); - } - - @Override - public List getSchemaList() throws Exception { - String databaseName = config.get(MySqlSourceOptions.DATABASE_NAME); - - List schemaList = new ArrayList<>(); - try (Connection conn = getConnection()) { - DatabaseMetaData metaData = conn.getMetaData(); - try (ResultSet catalogs = metaData.getCatalogs()) { - while 
(catalogs.next()) { - String tableCatalog = catalogs.getString("TABLE_CAT"); - if (tableCatalog.matches(databaseName)) { - try (ResultSet tables = - metaData.getTables( - tableCatalog, null, "%", new String[] {"TABLE"})) { - while (tables.next()) { - String tableName = tables.getString(DatabaseSyncConfig.TABLE_NAME); - String tableComment = tables.getString(DatabaseSyncConfig.REMARKS); - if (!isSyncNeeded(tableName)) { - continue; - } - SourceSchema sourceSchema = - new MysqlSchema( - metaData, tableCatalog, tableName, tableComment); - sourceSchema.setModel( - !sourceSchema.primaryKeys.isEmpty() - ? DataModel.UNIQUE - : DataModel.DUPLICATE); - schemaList.add(sourceSchema); - } - } - } - } - } - } - return schemaList; - } - - @Override - public DataStreamSource buildCdcSource(StreamExecutionEnvironment env) { - MySqlSourceBuilder sourceBuilder = MySqlSource.builder(); - - String databaseName = config.get(MySqlSourceOptions.DATABASE_NAME); - Preconditions.checkNotNull(databaseName, "database-name in mysql is required"); - String tableName = config.get(MySqlSourceOptions.TABLE_NAME); - sourceBuilder - .hostname(config.get(MySqlSourceOptions.HOSTNAME)) - .port(config.get(MySqlSourceOptions.PORT)) - .username(config.get(MySqlSourceOptions.USERNAME)) - .password(config.get(MySqlSourceOptions.PASSWORD)) - .databaseList(databaseName) - .tableList(tableName); - - config.getOptional(MySqlSourceOptions.SERVER_ID).ifPresent(sourceBuilder::serverId); - config.getOptional(MySqlSourceOptions.SERVER_TIME_ZONE) - .ifPresent(sourceBuilder::serverTimeZone); - config.getOptional(MySqlSourceOptions.SCAN_SNAPSHOT_FETCH_SIZE) - .ifPresent(sourceBuilder::fetchSize); - config.getOptional(MySqlSourceOptions.CONNECT_TIMEOUT) - .ifPresent(sourceBuilder::connectTimeout); - config.getOptional(MySqlSourceOptions.CONNECT_MAX_RETRIES) - .ifPresent(sourceBuilder::connectMaxRetries); - config.getOptional(MySqlSourceOptions.CONNECTION_POOL_SIZE) - .ifPresent(sourceBuilder::connectionPoolSize); - config.getOptional(MySqlSourceOptions.HEARTBEAT_INTERVAL) - .ifPresent(sourceBuilder::heartbeatInterval); - config.getOptional(MySqlSourceOptions.SCAN_NEWLY_ADDED_TABLE_ENABLED) - .ifPresent(sourceBuilder::scanNewlyAddedTableEnabled); - config.getOptional(MySqlSourceOptions.SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE) - .ifPresent(sourceBuilder::splitSize); - config.getOptional(MySqlSourceOptions.SCAN_INCREMENTAL_CLOSE_IDLE_READER_ENABLED) - .ifPresent(sourceBuilder::closeIdleReaders); - - setChunkColumns(sourceBuilder); - String startupMode = config.get(MySqlSourceOptions.SCAN_STARTUP_MODE); - if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_INITIAL.equalsIgnoreCase(startupMode)) { - sourceBuilder.startupOptions(StartupOptions.initial()); - } else if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_EARLIEST_OFFSET.equalsIgnoreCase( - startupMode)) { - sourceBuilder.startupOptions(StartupOptions.earliest()); - } else if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_LATEST_OFFSET.equalsIgnoreCase( - startupMode)) { - sourceBuilder.startupOptions(StartupOptions.latest()); - } else if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_SPECIFIC_OFFSET.equalsIgnoreCase( - startupMode)) { - BinlogOffsetBuilder offsetBuilder = BinlogOffset.builder(); - String file = config.get(MySqlSourceOptions.SCAN_STARTUP_SPECIFIC_OFFSET_FILE); - Long pos = config.get(MySqlSourceOptions.SCAN_STARTUP_SPECIFIC_OFFSET_POS); - if (file != null && pos != null) { - offsetBuilder.setBinlogFilePosition(file, pos); - } - 
config.getOptional(MySqlSourceOptions.SCAN_STARTUP_SPECIFIC_OFFSET_GTID_SET) - .ifPresent(offsetBuilder::setGtidSet); - config.getOptional(MySqlSourceOptions.SCAN_STARTUP_SPECIFIC_OFFSET_SKIP_EVENTS) - .ifPresent(offsetBuilder::setSkipEvents); - config.getOptional(MySqlSourceOptions.SCAN_STARTUP_SPECIFIC_OFFSET_SKIP_ROWS) - .ifPresent(offsetBuilder::setSkipRows); - sourceBuilder.startupOptions(StartupOptions.specificOffset(offsetBuilder.build())); - } else if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_TIMESTAMP.equalsIgnoreCase( - startupMode)) { - sourceBuilder.startupOptions( - StartupOptions.timestamp( - config.get(MySqlSourceOptions.SCAN_STARTUP_TIMESTAMP_MILLIS))); - } - - Properties jdbcProperties = new Properties(); - Properties debeziumProperties = new Properties(); - // date to string - debeziumProperties.putAll(DateToStringConverter.DEFAULT_PROPS); - - for (Map.Entry entry : config.toMap().entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (key.startsWith(PROPERTIES_PREFIX)) { - jdbcProperties.put(key.substring(PROPERTIES_PREFIX.length()), value); - } else if (key.startsWith(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX)) { - debeziumProperties.put( - key.substring(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX.length()), value); - } - } - sourceBuilder.jdbcProperties(jdbcProperties); - sourceBuilder.debeziumProperties(debeziumProperties); - DebeziumDeserializationSchema schema; - if (ignoreDefaultValue) { - schema = new DorisJsonDebeziumDeserializationSchema(); - } else { - Map customConverterConfigs = new HashMap<>(); - customConverterConfigs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric"); - schema = new JsonDebeziumDeserializationSchema(false, customConverterConfigs); - } - MySqlSource mySqlSource = - sourceBuilder.deserializer(schema).includeSchemaChanges(true).build(); - - return env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source"); - } - - @Override - public String getTableListPrefix() { - return config.get(MySqlSourceOptions.DATABASE_NAME); - } - - /** - * set chunkkeyColumn,eg: db.table1:column1,db.table2:column2. - * - * @param sourceBuilder - */ - private void setChunkColumns(MySqlSourceBuilder sourceBuilder) { - Map chunkColumnMap = getChunkColumnMap(); - for (Map.Entry entry : chunkColumnMap.entrySet()) { - sourceBuilder.chunkKeyColumn(entry.getKey(), entry.getValue()); - } - } - - private Map getChunkColumnMap() { - Map chunkMap = new HashMap<>(); - String chunkColumn = - config.getString(MySqlSourceOptions.SCAN_INCREMENTAL_SNAPSHOT_CHUNK_KEY_COLUMN); - if (!StringUtils.isNullOrWhitespaceOnly(chunkColumn)) { - final Pattern chunkPattern = Pattern.compile("(\\S+)\\.(\\S+):(\\S+)"); - String[] tblColumns = chunkColumn.split(","); - for (String tblCol : tblColumns) { - Matcher matcher = chunkPattern.matcher(tblCol); - if (matcher.find()) { - String db = matcher.group(1); - String table = matcher.group(2); - String col = matcher.group(3); - chunkMap.put(new ObjectPath(db, table), col); - } - } - } - return chunkMap; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlSchema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlSchema.java deleted file mode 100644 index 3a9ffbd36..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/mysql/MysqlSchema.java +++ /dev/null @@ -1,40 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. 
See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.mysql; - -import org.apache.doris.flink.tools.cdc.JdbcSourceSchema; - -import java.sql.DatabaseMetaData; - -public class MysqlSchema extends JdbcSourceSchema { - - public MysqlSchema( - DatabaseMetaData metaData, String databaseName, String tableName, String tableComment) - throws Exception { - super(metaData, databaseName, null, tableName, tableComment); - } - - public String convertToDorisType(String fieldType, Integer precision, Integer scale) { - return MysqlType.toDorisType(fieldType, precision, scale); - } - - @Override - public String getCdcTableName() { - return databaseName + "\\." + tableName; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDatabaseSync.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDatabaseSync.java deleted file mode 100644 index 9b6844544..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDatabaseSync.java +++ /dev/null @@ -1,262 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc.oracle; - -import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.cdc.connectors.base.options.StartupOptions; -import org.apache.flink.cdc.connectors.base.source.jdbc.JdbcIncrementalSource; -import org.apache.flink.cdc.connectors.oracle.OracleSource; -import org.apache.flink.cdc.connectors.oracle.source.OracleSourceBuilder; -import org.apache.flink.cdc.connectors.oracle.source.config.OracleSourceOptions; -import org.apache.flink.cdc.debezium.DebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.DebeziumSourceFunction; -import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.table.DebeziumOptions; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.util.Preconditions; -import org.apache.flink.util.StringUtils; - -import org.apache.doris.flink.catalog.doris.DataModel; -import org.apache.doris.flink.tools.cdc.DatabaseSync; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.apache.doris.flink.tools.cdc.deserialize.DorisJsonDebeziumDeserializationSchema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECTION_POOL_SIZE; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_MAX_RETRIES; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_TIMEOUT; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.CHUNK_META_GROUP_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_SNAPSHOT_FETCH_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND; - -public class OracleDatabaseSync extends DatabaseSync { - private static final Logger LOG = LoggerFactory.getLogger(OracleDatabaseSync.class); - - private static final String JDBC_URL = "jdbc:oracle:thin:@%s:%d:%s"; - private static final String PDB_KEY = "debezium.database.pdb.name"; - - public OracleDatabaseSync() throws SQLException { - super(); - } - - @Override - public void registerDriver() throws SQLException { - try { - Class.forName("oracle.jdbc.driver.OracleDriver"); - } catch (ClassNotFoundException ex) { - LOG.warn( - "can not found class oracle.jdbc.driver.OracleDriver, use class oracle.jdbc.OracleDriver"); - try { - Class.forName("oracle.jdbc.OracleDriver"); - } catch (Exception e) { - throw new SQLException( - "No suitable driver found, can not found class oracle.jdbc.driver.OracleDriver and oracle.jdbc.OracleDriver"); - } - } - } - - @Override - public Connection getConnection() throws SQLException { - String jdbcUrl; - if 
(!StringUtils.isNullOrWhitespaceOnly(config.get(OracleSourceOptions.URL))) { - jdbcUrl = config.get(OracleSourceOptions.URL); - } else { - jdbcUrl = - String.format( - JDBC_URL, - config.get(OracleSourceOptions.HOSTNAME), - config.get(OracleSourceOptions.PORT), - config.get(OracleSourceOptions.DATABASE_NAME)); - } - Properties pro = new Properties(); - pro.setProperty(DatabaseSyncConfig.USER, config.get(OracleSourceOptions.USERNAME)); - pro.setProperty(DatabaseSyncConfig.PASSWORD, config.get(OracleSourceOptions.PASSWORD)); - pro.put("remarksReporting", "true"); - return DriverManager.getConnection(jdbcUrl, pro); - } - - @Override - public List getSchemaList() throws Exception { - String databaseName = config.get(OracleSourceOptions.DATABASE_NAME); - String schemaName = config.get(OracleSourceOptions.SCHEMA_NAME); - - List schemaList = new ArrayList<>(); - LOG.info("database-name {}, schema-name {}", databaseName, schemaName); - try (Connection conn = getConnection()) { - setSessionToPdb(conn); - DatabaseMetaData metaData = conn.getMetaData(); - try (ResultSet tables = - metaData.getTables(databaseName, schemaName, "%", new String[] {"TABLE"})) { - while (tables.next()) { - String tableName = tables.getString(DatabaseSyncConfig.TABLE_NAME); - String tableComment = tables.getString(DatabaseSyncConfig.REMARKS); - if (!isSyncNeeded(tableName)) { - continue; - } - SourceSchema sourceSchema = - new OracleSchema( - metaData, databaseName, schemaName, tableName, tableComment); - sourceSchema.setModel( - !sourceSchema.primaryKeys.isEmpty() - ? DataModel.UNIQUE - : DataModel.DUPLICATE); - schemaList.add(sourceSchema); - } - } - } - return schemaList; - } - - private void setSessionToPdb(Connection conn) throws SQLException { - String pdbName = null; - for (Map.Entry entry : config.toMap().entrySet()) { - String key = entry.getKey(); - if (key.equals(PDB_KEY)) { - pdbName = entry.getValue(); - break; - } - } - if (!StringUtils.isNullOrWhitespaceOnly(pdbName)) { - LOG.info("Found pdb name in config, set session to pdb to {}", pdbName); - try (Statement statement = conn.createStatement()) { - statement.execute("alter session set container=" + pdbName); - } - } - } - - @Override - public DataStreamSource buildCdcSource(StreamExecutionEnvironment env) { - Properties debeziumProperties = new Properties(); - String databaseName = config.get(OracleSourceOptions.DATABASE_NAME); - String schemaName = config.get(OracleSourceOptions.SCHEMA_NAME); - Preconditions.checkNotNull(databaseName, "database-name in oracle is required"); - Preconditions.checkNotNull(schemaName, "schema-name in oracle is required"); - String tableName = config.get(OracleSourceOptions.TABLE_NAME); - // When debezium incrementally reads, it will be judged based on regexp_like. - // When the regular length exceeds 512, an error will be reported, - // like ORA-12733: regular expression too long - if (tableName.length() > 512) { - tableName = StringUtils.isNullOrWhitespaceOnly(includingTables) ? 
".*" : tableName; - } - - String url = config.get(OracleSourceOptions.URL); - String hostname = config.get(OracleSourceOptions.HOSTNAME); - Integer port = config.get(OracleSourceOptions.PORT); - String username = config.get(OracleSourceOptions.USERNAME); - String password = config.get(OracleSourceOptions.PASSWORD); - - StartupOptions startupOptions = StartupOptions.initial(); - String startupMode = config.get(OracleSourceOptions.SCAN_STARTUP_MODE); - if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_INITIAL.equalsIgnoreCase(startupMode)) { - startupOptions = StartupOptions.initial(); - } else if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_LATEST_OFFSET.equalsIgnoreCase( - startupMode)) { - startupOptions = StartupOptions.latest(); - } - - // debezium properties set - debeziumProperties.put(DatabaseSyncConfig.DECIMAL_HANDLING_MODE, "string"); - // date to string - debeziumProperties.putAll(OracleDateConverter.DEFAULT_PROPS); - - for (Map.Entry entry : config.toMap().entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (key.startsWith(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX)) { - debeziumProperties.put( - key.substring(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX.length()), value); - } - } - - DebeziumDeserializationSchema schema; - if (ignoreDefaultValue) { - schema = new DorisJsonDebeziumDeserializationSchema(); - } else { - Map customConverterConfigs = new HashMap<>(); - schema = new JsonDebeziumDeserializationSchema(false, customConverterConfigs); - } - - if (config.getBoolean(OracleSourceOptions.SCAN_INCREMENTAL_SNAPSHOT_ENABLED, false)) { - JdbcIncrementalSource incrSource = - OracleSourceBuilder.OracleIncrementalSource.builder() - .hostname(hostname) - .url(url) - .port(port) - .databaseList(databaseName) - .schemaList(schemaName) - .tableList(tableName) - .username(username) - .password(password) - .includeSchemaChanges(true) - .startupOptions(startupOptions) - .deserializer(schema) - .debeziumProperties(debeziumProperties) - .splitSize(config.get(SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE)) - .splitMetaGroupSize(config.get(CHUNK_META_GROUP_SIZE)) - .fetchSize(config.get(SCAN_SNAPSHOT_FETCH_SIZE)) - .connectTimeout(config.get(CONNECT_TIMEOUT)) - .connectionPoolSize(config.get(CONNECTION_POOL_SIZE)) - .connectMaxRetries(config.get(CONNECT_MAX_RETRIES)) - .distributionFactorUpper( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND)) - .distributionFactorLower( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND)) - .build(); - return env.fromSource( - incrSource, WatermarkStrategy.noWatermarks(), "Oracle IncrSource"); - } else { - DebeziumSourceFunction oracleSource = - OracleSource.builder() - .url(url) - .hostname(hostname) - .port(port) - .username(username) - .password(password) - .database(databaseName) - .schemaList(schemaName) - .tableList(tableName) - .debeziumProperties(debeziumProperties) - .startupOptions(startupOptions) - .deserializer(schema) - .build(); - return env.addSource(oracleSource, "Oracle Source"); - } - } - - @Override - public String getTableListPrefix() { - return config.get(OracleSourceOptions.SCHEMA_NAME); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDateConverter.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDateConverter.java deleted file mode 100644 index 183b19f1c..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleDateConverter.java +++ /dev/null @@ -1,151 +0,0 @@ -// 
Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.oracle; - -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder; - -import io.debezium.spi.converter.CustomConverter; -import io.debezium.spi.converter.RelationalColumn; -import oracle.sql.TIMESTAMP; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.SQLException; -import java.sql.Timestamp; -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.time.format.DateTimeFormatter; -import java.time.format.DateTimeFormatterBuilder; -import java.time.temporal.ChronoField; -import java.util.Properties; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class OracleDateConverter implements CustomConverter { - private static final Logger log = LoggerFactory.getLogger(OracleDateConverter.class); - private static final Pattern TO_DATE = - Pattern.compile("TO_DATE\\('(.*)',[ ]*'(.*)'\\)", Pattern.CASE_INSENSITIVE); - private static final Pattern TO_TIMESTAMP = - Pattern.compile("TO_TIMESTAMP\\('(.*)'\\)", Pattern.CASE_INSENSITIVE); - private static final Pattern TIMESTAMP_OR_DATE_REGEX = - Pattern.compile("^TIMESTAMP[(]\\d[)]$|^DATE$", Pattern.CASE_INSENSITIVE); - private ZoneId timestampZoneId = ZoneId.systemDefault(); - public static final Properties DEFAULT_PROPS = new Properties(); - private static final String DATETIME_PATTERN = DatabaseSyncConfig.DATE_TIME_FORMAT; - private static final String DATETIMEV2_PATTERN = DatabaseSyncConfig.DATETIME_MICRO_FORMAT; - private final DateTimeFormatter dateTimeV2Formatter = - DateTimeFormatter.ofPattern(DATETIMEV2_PATTERN); - - static { - DEFAULT_PROPS.setProperty(DatabaseSyncConfig.CONVERTERS, "oracleDate"); - DEFAULT_PROPS.setProperty( - "oracleDate.type", "org.apache.doris.flink.tools.cdc.oracle.OracleDateConverter"); - } - - private static final DateTimeFormatter TIMESTAMP_FORMATTER = - new DateTimeFormatterBuilder() - .parseCaseInsensitive() - .appendPattern(DatabaseSyncConfig.DATE_TIME_FORMAT) - .optionalStart() - .appendPattern(".") - .appendFraction(ChronoField.NANO_OF_SECOND, 0, 9, false) - .optionalEnd() - .toFormatter(); - - @Override - public void configure(Properties props) {} - - @Override - public void converterFor( - RelationalColumn column, ConverterRegistration registration) { - String typeName = column.typeName(); - if (TIMESTAMP_OR_DATE_REGEX.matcher(typeName).matches()) { - registration.register( - SchemaBuilder.string().optional(), - value -> { - if (value == null) { - if (column.isOptional()) { - return null; - } else if (column.hasDefaultValue()) { - return column.defaultValue(); - } else { - return null; - } - } - - if (value instanceof 
String) { - return convertStringTimestamp((String) value); - } - if (value instanceof Timestamp) { - return dateTimeV2Formatter.format( - ((Timestamp) value).toLocalDateTime()); - } - - // oracle timestamp - try { - if (value instanceof TIMESTAMP) { - return dateTimeV2Formatter.format( - ((TIMESTAMP) value).timestampValue().toLocalDateTime()); - } - } catch (SQLException ex) { - log.error("convert timestamp failed, values is {}", value); - } - - return null; - }); - } - } - - private String convertStringTimestamp(String data) { - LocalDateTime dateTime; - - final Matcher toTimestampMatcher = TO_TIMESTAMP.matcher(data); - if (toTimestampMatcher.matches()) { - String dateText = toTimestampMatcher.group(1); - dateTime = - LocalDateTime.from( - TIMESTAMP_FORMATTER.parse(completeMilliseconds(dateText.trim()))); - return dateTimeV2Formatter.format(dateTime.atZone(timestampZoneId)); - } - - final Matcher toDateMatcher = TO_DATE.matcher(data); - if (toDateMatcher.matches()) { - String date = toDateMatcher.group(1); - dateTime = - LocalDateTime.from( - TIMESTAMP_FORMATTER.parse(completeMilliseconds(date.trim()))); - return dateTimeV2Formatter.format(dateTime.atZone(timestampZoneId)); - } - return null; - } - - private String completeMilliseconds(String stringValue) { - if (stringValue.length() == DATETIMEV2_PATTERN.length()) { - return stringValue; - } - StringBuilder sb = new StringBuilder(stringValue); - if (stringValue.length() == DATETIME_PATTERN.length()) { - sb.append("."); - } - while (sb.toString().length() < DATETIMEV2_PATTERN.length()) { - sb.append(0); - } - return sb.toString(); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleSchema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleSchema.java deleted file mode 100644 index 71e447723..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/oracle/OracleSchema.java +++ /dev/null @@ -1,60 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
-
-package org.apache.doris.flink.tools.cdc.oracle;
-
-import org.apache.doris.flink.catalog.doris.FieldSchema;
-import org.apache.doris.flink.tools.cdc.JdbcSourceSchema;
-
-import java.sql.DatabaseMetaData;
-import java.sql.SQLException;
-import java.util.LinkedHashMap;
-
-public class OracleSchema extends JdbcSourceSchema {
-
-    public OracleSchema(
-            DatabaseMetaData metaData,
-            String databaseName,
-            String schemaName,
-            String tableName,
-            String tableComment)
-            throws Exception {
-        super(metaData, databaseName, schemaName, tableName, tableComment);
-    }
-
-    @Override
-    public String convertToDorisType(String fieldType, Integer precision, Integer scale) {
-        return OracleType.toDorisType(fieldType, precision, scale);
-    }
-
-    @Override
-    public String getCdcTableName() {
-        return schemaName + "\\." + tableName;
-    }
-
-    @Override
-    public LinkedHashMap<String, FieldSchema> getColumnInfo(
-            DatabaseMetaData metaData, String databaseName, String schemaName, String tableName)
-            throws SQLException {
-        // Oracle permits table names to include special characters such as /,
-        // etc., as in 'A/B'.
-        // When attempting to fetch column information for `A/B` via JDBC,
-        // it may throw an ORA-01424 error.
-        // Hence, we substitute `/` with '_' to address the issue.
-        return super.getColumnInfo(metaData, databaseName, schemaName, tableName.replace("/", "_"));
-    }
-}
diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDatabaseSync.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDatabaseSync.java
deleted file mode 100644
index 15fc632b4..000000000
--- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDatabaseSync.java
+++ /dev/null
@@ -1,252 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
- -package org.apache.doris.flink.tools.cdc.postgres; - -import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.cdc.connectors.base.options.SourceOptions; -import org.apache.flink.cdc.connectors.base.options.StartupOptions; -import org.apache.flink.cdc.connectors.base.source.jdbc.JdbcIncrementalSource; -import org.apache.flink.cdc.connectors.postgres.PostgreSQLSource; -import org.apache.flink.cdc.connectors.postgres.source.PostgresSourceBuilder; -import org.apache.flink.cdc.connectors.postgres.source.config.PostgresSourceOptions; -import org.apache.flink.cdc.debezium.DebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.DebeziumSourceFunction; -import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.table.DebeziumOptions; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.util.Preconditions; - -import org.apache.doris.flink.catalog.doris.DataModel; -import org.apache.doris.flink.tools.cdc.DatabaseSync; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.apache.doris.flink.tools.cdc.deserialize.DorisJsonDebeziumDeserializationSchema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECTION_POOL_SIZE; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_MAX_RETRIES; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_TIMEOUT; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.CHUNK_META_GROUP_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_SNAPSHOT_FETCH_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND; -import static org.apache.flink.cdc.connectors.postgres.source.config.PostgresSourceOptions.DECODING_PLUGIN_NAME; -import static org.apache.flink.cdc.connectors.postgres.source.config.PostgresSourceOptions.HEARTBEAT_INTERVAL; -import static org.apache.flink.cdc.connectors.postgres.source.config.PostgresSourceOptions.SLOT_NAME; - -public class PostgresDatabaseSync extends DatabaseSync { - private static final Logger LOG = LoggerFactory.getLogger(PostgresDatabaseSync.class); - - private static final String JDBC_URL = "jdbc:postgresql://%s:%d/%s?"; - - public PostgresDatabaseSync() throws SQLException { - super(); - } - - @Override - public void registerDriver() throws SQLException { - try { - Class.forName("org.postgresql.Driver"); - } catch (ClassNotFoundException ex) { - throw new SQLException( - "No suitable driver found, can not found class org.postgresql.Driver"); - } - } - - @Override - public Connection getConnection() throws SQLException { - Properties jdbcProperties = 
getJdbcProperties(); - String jdbcUrlTemplate = getJdbcUrlTemplate(JDBC_URL, jdbcProperties); - String jdbcUrl = - String.format( - jdbcUrlTemplate, - config.get(PostgresSourceOptions.HOSTNAME), - config.get(PostgresSourceOptions.PG_PORT), - config.get(PostgresSourceOptions.DATABASE_NAME)); - Properties pro = new Properties(); - pro.setProperty(DatabaseSyncConfig.USER, config.get(PostgresSourceOptions.USERNAME)); - pro.setProperty(DatabaseSyncConfig.PASSWORD, config.get(PostgresSourceOptions.PASSWORD)); - return DriverManager.getConnection(jdbcUrl, pro); - } - - @Override - public List getSchemaList() throws Exception { - String databaseName = config.get(PostgresSourceOptions.DATABASE_NAME); - String schemaName = config.get(PostgresSourceOptions.SCHEMA_NAME); - List schemaList = new ArrayList<>(); - LOG.info("database-name {}, schema-name {}", databaseName, schemaName); - try (Connection conn = getConnection()) { - DatabaseMetaData metaData = conn.getMetaData(); - try (ResultSet tables = - metaData.getTables( - databaseName, - schemaName, - "%", - new String[] {"TABLE", "PARTITIONED TABLE"})) { - while (tables.next()) { - String tableName = tables.getString(DatabaseSyncConfig.TABLE_NAME); - String tableComment = tables.getString(DatabaseSyncConfig.REMARKS); - if (!isSyncNeeded(tableName)) { - continue; - } - SourceSchema sourceSchema = - new PostgresSchema( - metaData, databaseName, schemaName, tableName, tableComment); - sourceSchema.setModel( - !sourceSchema.primaryKeys.isEmpty() - ? DataModel.UNIQUE - : DataModel.DUPLICATE); - schemaList.add(sourceSchema); - } - } - } - return schemaList; - } - - @Override - public DataStreamSource buildCdcSource(StreamExecutionEnvironment env) { - String databaseName = config.get(PostgresSourceOptions.DATABASE_NAME); - String schemaName = config.get(PostgresSourceOptions.SCHEMA_NAME); - String slotName = config.get(SLOT_NAME); - Preconditions.checkNotNull(databaseName, "database-name in postgres is required"); - Preconditions.checkNotNull(schemaName, "schema-name in postgres is required"); - Preconditions.checkNotNull(slotName, "slot.name in postgres is required"); - - String tableName = config.get(PostgresSourceOptions.TABLE_NAME); - String hostname = config.get(PostgresSourceOptions.HOSTNAME); - Integer port = config.get(PostgresSourceOptions.PG_PORT); - String username = config.get(PostgresSourceOptions.USERNAME); - String password = config.get(PostgresSourceOptions.PASSWORD); - - StartupOptions startupOptions = StartupOptions.initial(); - String startupMode = config.get(PostgresSourceOptions.SCAN_STARTUP_MODE); - if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_INITIAL.equalsIgnoreCase(startupMode)) { - startupOptions = StartupOptions.initial(); - } else if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_LATEST_OFFSET.equalsIgnoreCase( - startupMode)) { - startupOptions = StartupOptions.latest(); - } - - // debezium properties set - Properties debeziumProperties = new Properties(); - debeziumProperties.putAll(PostgresDateConverter.DEFAULT_PROPS); - debeziumProperties.put(DatabaseSyncConfig.DECIMAL_HANDLING_MODE, "string"); - - for (Map.Entry entry : config.toMap().entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (key.startsWith(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX)) { - debeziumProperties.put( - key.substring(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX.length()), value); - } - } - - DebeziumDeserializationSchema schema; - if (ignoreDefaultValue) { - schema = new DorisJsonDebeziumDeserializationSchema(); - } else { - Map 
customConverterConfigs = new HashMap<>(); - schema = new JsonDebeziumDeserializationSchema(false, customConverterConfigs); - } - - if (config.getBoolean(SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_ENABLED, false)) { - JdbcIncrementalSource incrSource = - PostgresSourceBuilder.PostgresIncrementalSource.builder() - .hostname(hostname) - .port(port) - .database(databaseName) - .schemaList(schemaName) - .tableList(tableName) - .username(username) - .password(password) - .deserializer(schema) - .slotName(slotName) - .decodingPluginName(config.get(DECODING_PLUGIN_NAME)) - .includeSchemaChanges(true) - .debeziumProperties(debeziumProperties) - .startupOptions(startupOptions) - .splitSize(config.get(SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE)) - .splitMetaGroupSize(config.get(CHUNK_META_GROUP_SIZE)) - .fetchSize(config.get(SCAN_SNAPSHOT_FETCH_SIZE)) - .connectTimeout(config.get(CONNECT_TIMEOUT)) - .connectionPoolSize(config.get(CONNECTION_POOL_SIZE)) - .connectMaxRetries(config.get(CONNECT_MAX_RETRIES)) - .distributionFactorUpper( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND)) - .distributionFactorLower( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND)) - .heartbeatInterval(config.get(HEARTBEAT_INTERVAL)) - .build(); - return env.fromSource( - incrSource, WatermarkStrategy.noWatermarks(), "Postgres IncrSource"); - } else { - DebeziumSourceFunction postgresSource = - PostgreSQLSource.builder() - .hostname(hostname) - .port(port) - .database(databaseName) - .schemaList(schemaName) - .tableList(tableName) - .username(username) - .password(password) - .debeziumProperties(debeziumProperties) - .deserializer(schema) - .slotName(slotName) - .decodingPluginName(config.get(DECODING_PLUGIN_NAME)) - .build(); - return env.addSource(postgresSource, "Postgres Source"); - } - } - - @Override - public String getTableListPrefix() { - return config.get(PostgresSourceOptions.SCHEMA_NAME); - } - - @Override - protected String getJdbcUrlTemplate(String initialJdbcUrl, Properties jdbcProperties) { - - if (!initialJdbcUrl.startsWith("?")) { - return super.getJdbcUrlTemplate(initialJdbcUrl, jdbcProperties); - } - StringBuilder jdbcUrlBuilder = new StringBuilder(initialJdbcUrl); - int recordIndex = 0; - for (Map.Entry entry : jdbcProperties.entrySet()) { - jdbcUrlBuilder.append(entry.getKey()).append("=").append(entry.getValue()); - if (recordIndex < jdbcProperties.size() - 1) { - jdbcUrlBuilder.append("&"); - recordIndex++; - } - } - return jdbcUrlBuilder.toString(); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDateConverter.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDateConverter.java deleted file mode 100644 index 7a967c19b..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresDateConverter.java +++ /dev/null @@ -1,133 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.postgres; - -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder; - -import io.debezium.spi.converter.CustomConverter; -import io.debezium.spi.converter.RelationalColumn; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Date; -import java.sql.Timestamp; -import java.time.DateTimeException; -import java.time.Instant; -import java.time.LocalDate; -import java.time.LocalDateTime; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.util.Properties; -import java.util.function.Consumer; - -public class PostgresDateConverter implements CustomConverter { - private static final Logger log = LoggerFactory.getLogger(PostgresDateConverter.class); - private DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_DATE; - private DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_DATE_TIME; - - public static final Properties DEFAULT_PROPS = new Properties(); - - static { - DEFAULT_PROPS.setProperty(DatabaseSyncConfig.CONVERTERS, DatabaseSyncConfig.DATE); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_TYPE, - "org.apache.doris.flink.tools.cdc.postgres.PostgresDateConverter"); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_FORMAT_DATE, DatabaseSyncConfig.YEAR_MONTH_DAY_FORMAT); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_FORMAT_TIMESTAMP, DatabaseSyncConfig.DATETIME_MICRO_FORMAT); - } - - @Override - public void configure(Properties props) { - readProps( - props, - DatabaseSyncConfig.FORMAT_DATE, - p -> dateFormatter = DateTimeFormatter.ofPattern(p)); - readProps( - props, - DatabaseSyncConfig.FORMAT_TIMESTAMP, - p -> timestampFormatter = DateTimeFormatter.ofPattern(p)); - } - - private void readProps(Properties properties, String settingKey, Consumer callback) { - String settingValue = (String) properties.get(settingKey); - if (settingValue == null || settingValue.length() == 0) { - return; - } - try { - callback.accept(settingValue.trim()); - } catch (IllegalArgumentException | DateTimeException e) { - log.error("setting {} is illegal:{}", settingKey, settingValue); - throw e; - } - } - - @Override - public void converterFor( - RelationalColumn column, ConverterRegistration registration) { - String sqlType = column.typeName().toUpperCase(); - SchemaBuilder schemaBuilder = null; - Converter converter = null; - if (DatabaseSyncConfig.UPPERCASE_DATE.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertDate; - } - if (DatabaseSyncConfig.TIME.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertTime; - } - if (DatabaseSyncConfig.TIMESTAMP.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertTimestamp; - } - if (schemaBuilder != null) { - registration.register(schemaBuilder, converter); - } - } - - private String convertDate(Object input) { - if (input instanceof LocalDate) { - return dateFormatter.format((LocalDate) 
input); - } else if (input instanceof Integer) { - LocalDate date = LocalDate.ofEpochDay((Integer) input); - return dateFormatter.format(date); - } else if (input instanceof Date) { - return dateFormatter.format(((Date) input).toLocalDate()); - } - return null; - } - - private String convertTime(Object input) { - if (input instanceof String) { - return input.toString(); - } - return null; - } - - private String convertTimestamp(Object input) { - if (input instanceof Timestamp) { - return timestampFormatter.format(((Timestamp) input).toLocalDateTime()); - } else if (input instanceof Instant) { - LocalDateTime ldt = LocalDateTime.ofInstant(((Instant) input), ZoneOffset.UTC); - return timestampFormatter.format(ldt); - } - return null; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresSchema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresSchema.java deleted file mode 100644 index a431c4159..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/postgres/PostgresSchema.java +++ /dev/null @@ -1,45 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.postgres; - -import org.apache.doris.flink.tools.cdc.JdbcSourceSchema; - -import java.sql.DatabaseMetaData; - -public class PostgresSchema extends JdbcSourceSchema { - - public PostgresSchema( - DatabaseMetaData metaData, - String databaseName, - String schemaName, - String tableName, - String tableComment) - throws Exception { - super(metaData, databaseName, schemaName, tableName, tableComment); - } - - @Override - public String convertToDorisType(String fieldType, Integer precision, Integer scale) { - return PostgresType.toDorisType(fieldType, precision, scale); - } - - @Override - public String getCdcTableName() { - return schemaName + "\\." + tableName; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDatabaseSync.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDatabaseSync.java deleted file mode 100644 index cb6b66829..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDatabaseSync.java +++ /dev/null @@ -1,229 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.sqlserver; - -import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions; -import org.apache.flink.cdc.connectors.base.options.SourceOptions; -import org.apache.flink.cdc.connectors.base.options.StartupOptions; -import org.apache.flink.cdc.connectors.base.source.jdbc.JdbcIncrementalSource; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverterConfig; -import org.apache.flink.cdc.connectors.sqlserver.SqlServerSource; -import org.apache.flink.cdc.connectors.sqlserver.source.SqlServerSourceBuilder; -import org.apache.flink.cdc.debezium.DebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.DebeziumSourceFunction; -import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema; -import org.apache.flink.cdc.debezium.table.DebeziumOptions; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.util.Preconditions; - -import org.apache.doris.flink.catalog.doris.DataModel; -import org.apache.doris.flink.tools.cdc.DatabaseSync; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.apache.doris.flink.tools.cdc.SourceSchema; -import org.apache.doris.flink.tools.cdc.deserialize.DorisJsonDebeziumDeserializationSchema; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.sql.DatabaseMetaData; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; - -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECTION_POOL_SIZE; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_MAX_RETRIES; -import static org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions.CONNECT_TIMEOUT; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.CHUNK_META_GROUP_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SCAN_SNAPSHOT_FETCH_SIZE; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND; -import static org.apache.flink.cdc.connectors.base.options.SourceOptions.SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND; - -public class SqlServerDatabaseSync extends DatabaseSync { - private static final Logger LOG = LoggerFactory.getLogger(SqlServerDatabaseSync.class); - private static final String JDBC_URL = "jdbc:sqlserver://%s:%d;database=%s;"; - private static final String PORT = "port"; - - public SqlServerDatabaseSync() throws SQLException { - super(); - } - - @Override - public void registerDriver() throws SQLException { - try { - Class.forName("com.microsoft.sqlserver.jdbc.SQLServerDriver"); - } 
catch (ClassNotFoundException ex) { - throw new SQLException( - "No suitable driver found, can not found class com.microsoft.sqlserver.jdbc.SQLServerDriver"); - } - } - - @Override - public Connection getConnection() throws SQLException { - Properties jdbcProperties = getJdbcProperties(); - String jdbcUrlTemplate = getJdbcUrlTemplate(JDBC_URL, jdbcProperties); - String jdbcUrl = - String.format( - jdbcUrlTemplate, - config.get(JdbcSourceOptions.HOSTNAME), - config.getInteger(PORT, 1433), - config.get(JdbcSourceOptions.DATABASE_NAME)); - Properties pro = new Properties(); - pro.setProperty(DatabaseSyncConfig.USER, config.get(JdbcSourceOptions.USERNAME)); - pro.setProperty(DatabaseSyncConfig.PASSWORD, config.get(JdbcSourceOptions.PASSWORD)); - return DriverManager.getConnection(jdbcUrl, pro); - } - - @Override - public List getSchemaList() throws Exception { - String databaseName = config.get(JdbcSourceOptions.DATABASE_NAME); - String schemaName = config.get(JdbcSourceOptions.SCHEMA_NAME); - List schemaList = new ArrayList<>(); - LOG.info("database-name {}, schema-name {}", databaseName, schemaName); - try (Connection conn = getConnection()) { - DatabaseMetaData metaData = conn.getMetaData(); - try (ResultSet tables = - metaData.getTables(databaseName, schemaName, "%", new String[] {"TABLE"})) { - while (tables.next()) { - String tableName = tables.getString(DatabaseSyncConfig.TABLE_NAME); - String tableComment = tables.getString(DatabaseSyncConfig.REMARKS); - if (!isSyncNeeded(tableName)) { - continue; - } - SourceSchema sourceSchema = - new SqlServerSchema( - metaData, databaseName, schemaName, tableName, tableComment); - sourceSchema.setModel( - !sourceSchema.primaryKeys.isEmpty() - ? DataModel.UNIQUE - : DataModel.DUPLICATE); - schemaList.add(sourceSchema); - } - } - } - return schemaList; - } - - @Override - public DataStreamSource buildCdcSource(StreamExecutionEnvironment env) { - String databaseName = config.get(JdbcSourceOptions.DATABASE_NAME); - String schemaName = config.get(JdbcSourceOptions.SCHEMA_NAME); - Preconditions.checkNotNull(databaseName, "database-name in sqlserver is required"); - Preconditions.checkNotNull(schemaName, "schema-name in sqlserver is required"); - - String tableName = config.get(JdbcSourceOptions.TABLE_NAME); - String hostname = config.get(JdbcSourceOptions.HOSTNAME); - int port = config.getInteger(PORT, 1433); - String username = config.get(JdbcSourceOptions.USERNAME); - String password = config.get(JdbcSourceOptions.PASSWORD); - - StartupOptions startupOptions = StartupOptions.initial(); - String startupMode = config.get(JdbcSourceOptions.SCAN_STARTUP_MODE); - if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_INITIAL.equalsIgnoreCase(startupMode)) { - startupOptions = StartupOptions.initial(); - } else if (DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_LATEST_OFFSET.equalsIgnoreCase( - startupMode)) { - startupOptions = StartupOptions.latest(); - } - - // debezium properties set - Properties debeziumProperties = new Properties(); - debeziumProperties.putAll(SqlServerDateConverter.DEFAULT_PROPS); - debeziumProperties.put(DatabaseSyncConfig.DECIMAL_HANDLING_MODE, "string"); - - for (Map.Entry entry : config.toMap().entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - if (key.startsWith(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX)) { - debeziumProperties.put( - key.substring(DebeziumOptions.DEBEZIUM_OPTIONS_PREFIX.length()), value); - } - } - - DebeziumDeserializationSchema schema; - if (ignoreDefaultValue) { - schema = new 
DorisJsonDebeziumDeserializationSchema(); - } else { - Map customConverterConfigs = new HashMap<>(); - customConverterConfigs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric"); - schema = new JsonDebeziumDeserializationSchema(false, customConverterConfigs); - } - - if (config.getBoolean(SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_ENABLED, false)) { - JdbcIncrementalSource incrSource = - SqlServerSourceBuilder.SqlServerIncrementalSource.builder() - .hostname(hostname) - .port(port) - .databaseList(databaseName) - .tableList(tableName) - .username(username) - .password(password) - .startupOptions(startupOptions) - .deserializer(schema) - .includeSchemaChanges(true) - .debeziumProperties(debeziumProperties) - .splitSize(config.get(SCAN_INCREMENTAL_SNAPSHOT_CHUNK_SIZE)) - .splitMetaGroupSize(config.get(CHUNK_META_GROUP_SIZE)) - .fetchSize(config.get(SCAN_SNAPSHOT_FETCH_SIZE)) - .connectTimeout(config.get(CONNECT_TIMEOUT)) - .connectionPoolSize(config.get(CONNECTION_POOL_SIZE)) - .connectMaxRetries(config.get(CONNECT_MAX_RETRIES)) - .distributionFactorUpper( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_UPPER_BOUND)) - .distributionFactorLower( - config.get(SPLIT_KEY_EVEN_DISTRIBUTION_FACTOR_LOWER_BOUND)) - .build(); - return env.fromSource( - incrSource, WatermarkStrategy.noWatermarks(), "SqlServer IncrSource"); - } else { - DebeziumSourceFunction sqlServerSource = - SqlServerSource.builder() - .hostname(hostname) - .port(port) - .database(databaseName) - .tableList(tableName) - .username(username) - .password(password) - .debeziumProperties(debeziumProperties) - .startupOptions(startupOptions) - .deserializer(schema) - .build(); - return env.addSource(sqlServerSource, "SqlServer Source"); - } - } - - @Override - public String getTableListPrefix() { - return config.get(JdbcSourceOptions.SCHEMA_NAME); - } - - @Override - public String getJdbcUrlTemplate(String initialJdbcUrl, Properties jdbcProperties) { - StringBuilder jdbcUrlBuilder = new StringBuilder(initialJdbcUrl); - jdbcProperties.forEach( - (key, value) -> jdbcUrlBuilder.append(key).append("=").append(value).append(";")); - return jdbcUrlBuilder.toString(); - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDateConverter.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDateConverter.java deleted file mode 100644 index e862b0eeb..000000000 --- a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerDateConverter.java +++ /dev/null @@ -1,113 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
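The two removed DatabaseSync subclasses assemble their JDBC URLs differently: PostgresDatabaseSync joins key=value pairs with '&' after the trailing '?', while SqlServerDatabaseSync (above) terminates every pair with ';'. A condensed, self-contained sketch of both styles, assuming made-up class and method names rather than the connector's API:

import java.util.Properties;

public class JdbcUrlTemplates {

    // Postgres style: base ends with '?', properties are joined with '&'.
    static String postgresStyle(String base, Properties props) {
        StringBuilder sb = new StringBuilder(base);
        int index = 0;
        for (String key : props.stringPropertyNames()) {
            sb.append(key).append("=").append(props.getProperty(key));
            if (++index < props.size()) {
                sb.append("&");
            }
        }
        return sb.toString();
    }

    // SQL Server style: every property is terminated with ';'.
    static String sqlServerStyle(String base, Properties props) {
        StringBuilder sb = new StringBuilder(base);
        props.forEach((key, value) -> sb.append(key).append("=").append(value).append(";"));
        return sb.toString();
    }

    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("ssl", "false");

        System.out.println(
                String.format(postgresStyle("jdbc:postgresql://%s:%d/%s?", props),
                        "127.0.0.1", 5432, "postgres"));
        System.out.println(
                String.format(sqlServerStyle("jdbc:sqlserver://%s:%d;database=%s;", props),
                        "127.0.0.1", 1433, "master"));
    }
}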
- -package org.apache.doris.flink.tools.cdc.sqlserver; - -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder; - -import io.debezium.spi.converter.CustomConverter; -import io.debezium.spi.converter.RelationalColumn; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Date; -import java.sql.Timestamp; -import java.time.DateTimeException; -import java.time.format.DateTimeFormatter; -import java.util.Properties; -import java.util.function.Consumer; - -public class SqlServerDateConverter implements CustomConverter { - private static final Logger log = LoggerFactory.getLogger(SqlServerDateConverter.class); - private DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_DATE; - private DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_DATE_TIME; - - public static final Properties DEFAULT_PROPS = new Properties(); - - static { - DEFAULT_PROPS.setProperty(DatabaseSyncConfig.CONVERTERS, DatabaseSyncConfig.DATE); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_TYPE, - "org.apache.doris.flink.tools.cdc.sqlserver.SqlServerDateConverter"); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_FORMAT_DATE, DatabaseSyncConfig.YEAR_MONTH_DAY_FORMAT); - DEFAULT_PROPS.setProperty( - DatabaseSyncConfig.DATE_FORMAT_TIMESTAMP, DatabaseSyncConfig.DATETIME_MICRO_FORMAT); - } - - @Override - public void configure(Properties props) { - readProps( - props, - DatabaseSyncConfig.FORMAT_DATE, - p -> dateFormatter = DateTimeFormatter.ofPattern(p)); - readProps( - props, - DatabaseSyncConfig.FORMAT_TIMESTAMP, - p -> timestampFormatter = DateTimeFormatter.ofPattern(p)); - } - - private void readProps(Properties properties, String settingKey, Consumer callback) { - String settingValue = (String) properties.get(settingKey); - if (settingValue == null || settingValue.length() == 0) { - return; - } - try { - callback.accept(settingValue.trim()); - } catch (IllegalArgumentException | DateTimeException e) { - log.error("setting {} is illegal:{}", settingKey, settingValue); - throw e; - } - } - - @Override - public void converterFor( - RelationalColumn column, - CustomConverter.ConverterRegistration registration) { - String sqlType = column.typeName().toUpperCase(); - SchemaBuilder schemaBuilder = null; - CustomConverter.Converter converter = null; - if (DatabaseSyncConfig.UPPERCASE_DATE.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertDate; - } - if (DatabaseSyncConfig.SMALLDATETIME.equals(sqlType) - || DatabaseSyncConfig.DATETIME.equals(sqlType) - || DatabaseSyncConfig.DATETIME2.equals(sqlType)) { - schemaBuilder = SchemaBuilder.string().optional(); - converter = this::convertDateTime; - } - if (schemaBuilder != null) { - registration.register(schemaBuilder, converter); - } - } - - private Object convertDateTime(Object input) { - if (input instanceof Timestamp) { - return timestampFormatter.format(((Timestamp) input).toLocalDateTime()); - } - return null; - } - - private String convertDate(Object input) { - if (input instanceof Date) { - return dateFormatter.format(((Date) input).toLocalDate()); - } - return null; - } -} diff --git a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerSchema.java b/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerSchema.java deleted file mode 100644 index 18131ce9a..000000000 --- 
a/flink-doris-connector/src/main/java/org/apache/doris/flink/tools/cdc/sqlserver/SqlServerSchema.java +++ /dev/null @@ -1,45 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc.sqlserver; - -import org.apache.doris.flink.tools.cdc.JdbcSourceSchema; - -import java.sql.DatabaseMetaData; - -public class SqlServerSchema extends JdbcSourceSchema { - - public SqlServerSchema( - DatabaseMetaData metaData, - String databaseName, - String schemaName, - String tableName, - String tableComment) - throws Exception { - super(metaData, databaseName, schemaName, tableName, tableComment); - } - - @Override - public String convertToDorisType(String fieldType, Integer precision, Integer scale) { - return SqlServerType.toDorisType(fieldType, precision, scale); - } - - @Override - public String getCdcTableName() { - return schemaName + "\\." + tableName; - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/container/AbstractE2EService.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/container/AbstractE2EService.java deleted file mode 100644 index ec536ee68..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/container/AbstractE2EService.java +++ /dev/null @@ -1,155 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
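Both removed getSchemaList() implementations (Postgres above and SqlServer here) choose the Doris data model the same way: a source table with a primary key becomes a UNIQUE KEY table, otherwise a DUPLICATE KEY table. A trivial standalone sketch of that rule (the enum and method below are illustrative, not the connector's DataModel type):

import java.util.Collections;
import java.util.List;

public class DataModelChoice {

    enum DataModel { UNIQUE, DUPLICATE }

    // Tables with a primary key map to Doris UNIQUE KEY tables, the rest to DUPLICATE KEY tables.
    static DataModel chooseModel(List<String> primaryKeys) {
        return primaryKeys.isEmpty() ? DataModel.DUPLICATE : DataModel.UNIQUE;
    }

    public static void main(String[] args) {
        System.out.println(chooseModel(Collections.singletonList("id"))); // UNIQUE
        System.out.println(chooseModel(Collections.emptyList()));         // DUPLICATE
    }
}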
- -package org.apache.doris.flink.container; - -import org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.core.execution.JobClient; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.container.instance.ContainerService; -import org.apache.doris.flink.container.instance.MySQLContainer; -import org.apache.doris.flink.exception.DorisRuntimeException; -import org.apache.doris.flink.tools.cdc.CdcTools; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.junit.BeforeClass; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.sql.Connection; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.concurrent.Semaphore; - -public abstract class AbstractE2EService extends AbstractContainerTestBase { - private static final Logger LOG = LoggerFactory.getLogger(AbstractE2EService.class); - private static ContainerService mysqlContainerService; - private static JobClient jobClient; - protected static final Semaphore SEMAPHORE = new Semaphore(1); - protected static final String SINK_CONF = "--" + DatabaseSyncConfig.SINK_CONF; - protected static final String DORIS_DATABASE = "--database"; - protected static final String HOSTNAME = "hostname"; - protected static final String PORT = "port"; - protected static final String USERNAME = "username"; - protected static final String PASSWORD = "password"; - protected static final String DATABASE_NAME = "database-name"; - protected static final String FENODES = "fenodes"; - protected static final String JDBC_URL = "jdbc-url"; - protected static final String SINK_LABEL_PREFIX = "sink.label-prefix"; - - @BeforeClass - public static void initE2EContainers() { - LOG.info("Trying to Start init E2E containers."); - initMySQLContainer(); - } - - private static void initMySQLContainer() { - if (Objects.nonNull(mysqlContainerService) && mysqlContainerService.isRunning()) { - LOG.info("The MySQL container has been started and is running status."); - return; - } - mysqlContainerService = new MySQLContainer(); - mysqlContainerService.startContainer(); - LOG.info("Mysql container was started."); - } - - protected String getMySQLInstanceHost() { - return mysqlContainerService.getInstanceHost(); - } - - protected Integer getMySQLQueryPort() { - return mysqlContainerService.getMappedPort(3306); - } - - protected String getMySQLUsername() { - return mysqlContainerService.getUsername(); - } - - protected String getMySQLPassword() { - return mysqlContainerService.getPassword(); - } - - protected Connection getMySQLQueryConnection() { - return mysqlContainerService.getQueryConnection(); - } - - protected void submitE2EJob(String jobName, String[] args) { - try { - LOG.info("{} e2e job will submit to start. ", jobName); - CdcTools.setStreamExecutionEnvironmentForTesting(configFlinkEnvironment()); - CdcTools.main(args); - jobClient = CdcTools.getJobClient(); - if (Objects.isNull(jobClient)) { - LOG.warn("Failed get flink job client. jobName={}", jobName); - throw new DorisRuntimeException("Failed get flink job client. jobName=" + jobName); - } - } catch (Exception e) { - LOG.warn("Failed to submit e2e job. 
jobName={}", jobName); - throw new DorisRuntimeException(e); - } - } - - protected void cancelE2EJob(String jobName) { - LOG.info("{} e2e job will cancel", jobName); - jobClient.cancel(); - } - - private StreamExecutionEnvironment configFlinkEnvironment() { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(DEFAULT_PARALLELISM); - Map flinkMap = new HashMap<>(); - flinkMap.put("execution.checkpointing.interval", "10s"); - flinkMap.put("pipeline.operator-chaining", "false"); - flinkMap.put("parallelism.default", "1"); - Configuration configuration = Configuration.fromMap(flinkMap); - env.configure(configuration); - env.setRestartStrategy(RestartStrategies.noRestart()); - return env; - } - - protected void setSinkConfDefaultConfig(List argList) { - // set default doris sink config - argList.add(SINK_CONF); - argList.add(FENODES + "=" + getFenodes()); - argList.add(SINK_CONF); - argList.add(USERNAME + "=" + getDorisUsername()); - argList.add(SINK_CONF); - argList.add(PASSWORD + "=" + getDorisPassword()); - argList.add(SINK_CONF); - argList.add(FENODES + "=" + getFenodes()); - argList.add(SINK_CONF); - argList.add(JDBC_URL + "=" + getDorisQueryUrl()); - argList.add(SINK_CONF); - argList.add(SINK_LABEL_PREFIX + "=" + "label"); - } - - public static void closeE2EContainers() { - LOG.info("Starting to close E2E containers."); - closeMySQLContainer(); - } - - private static void closeMySQLContainer() { - if (Objects.isNull(mysqlContainerService)) { - return; - } - mysqlContainerService.close(); - LOG.info("Mysql container was closed."); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/container/e2e/Mysql2DorisE2ECase.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/container/e2e/Mysql2DorisE2ECase.java deleted file mode 100644 index 938aa2184..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/container/e2e/Mysql2DorisE2ECase.java +++ /dev/null @@ -1,393 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.container.e2e; - -import org.apache.doris.flink.container.AbstractE2EService; -import org.apache.doris.flink.container.ContainerUtils; -import org.apache.doris.flink.tools.cdc.DatabaseSyncConfig; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Arrays; -import java.util.List; - -public class Mysql2DorisE2ECase extends AbstractE2EService { - private static final Logger LOG = LoggerFactory.getLogger(Mysql2DorisE2ECase.class); - private static final String DATABASE = "test_e2e_mysql"; - private static final String CREATE_DATABASE = "CREATE DATABASE IF NOT EXISTS " + DATABASE; - private static final String MYSQL_CONF = "--" + DatabaseSyncConfig.MYSQL_CONF; - - @Before - public void setUp() throws InterruptedException { - LOG.info("Mysql2DorisE2ECase attempting to acquire semaphore."); - SEMAPHORE.acquire(); - LOG.info("Mysql2DorisE2ECase semaphore acquired."); - } - - private List setMysql2DorisDefaultConfig(List argList) { - // set default mysql config - argList.add(MYSQL_CONF); - argList.add(HOSTNAME + "=" + getMySQLInstanceHost()); - argList.add(MYSQL_CONF); - argList.add(PORT + "=" + getMySQLQueryPort()); - argList.add(MYSQL_CONF); - argList.add(USERNAME + "=" + getMySQLUsername()); - argList.add(MYSQL_CONF); - argList.add(PASSWORD + "=" + getMySQLPassword()); - argList.add(MYSQL_CONF); - argList.add(DATABASE_NAME + "=" + DATABASE); - // argList.add(MYSQL_CONF); - // argList.add("server-time-zone=UTC"); - - // set doris database - argList.add(DORIS_DATABASE); - argList.add(DATABASE); - setSinkConfDefaultConfig(argList); - return argList; - } - - private void startMysql2DorisJob(String jobName, String resourcePath) { - LOG.info("start a mysql to doris job. jobName={}, resourcePath={}", jobName, resourcePath); - List argList = ContainerUtils.parseFileArgs(resourcePath); - String[] args = setMysql2DorisDefaultConfig(argList).toArray(new String[0]); - submitE2EJob(jobName, args); - } - - private void initMysqlEnvironment(String sourcePath) { - LOG.info("Initializing MySQL environment."); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), LOG, ContainerUtils.parseFileContentSQL(sourcePath)); - } - - private void initDorisEnvironment() { - LOG.info("Initializing Doris environment."); - ContainerUtils.executeSQLStatement(getDorisQueryConnection(), LOG, CREATE_DATABASE); - ContainerUtils.executeSQLStatement( - getDorisQueryConnection(), - LOG, - "DROP TABLE IF EXISTS test_e2e_mysql.tbl1", - "DROP TABLE IF EXISTS test_e2e_mysql.tbl2", - "DROP TABLE IF EXISTS test_e2e_mysql.tbl3", - "DROP TABLE IF EXISTS test_e2e_mysql.tbl4", - "DROP TABLE IF EXISTS test_e2e_mysql.tbl5"); - } - - private void initEnvironment(String jobName, String mysqlSourcePath) { - LOG.info( - "start to init mysql to doris environment. 
jobName={}, mysqlSourcePath={}", - jobName, - mysqlSourcePath); - initMysqlEnvironment(mysqlSourcePath); - initDorisEnvironment(); - } - - @Test - public void testMySQL2Doris() throws Exception { - String jobName = "testMySQL2Doris"; - String resourcePath = "container/e2e/mysql2doris/testMySQL2Doris.txt"; - initEnvironment(jobName, "container/e2e/mysql2doris/testMySQL2Doris_init.sql"); - startMysql2DorisJob(jobName, resourcePath); - - // wait 2 times checkpoint - Thread.sleep(20000); - LOG.info("Start to verify init result."); - List expected = Arrays.asList("doris_1,1", "doris_2,2", "doris_3,3", "doris_5,5"); - String sql1 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 union all select * from test_e2e_mysql.tbl5) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, sql1, 2); - - // add incremental data - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "insert into test_e2e_mysql.tbl1 values ('doris_1_1',10)", - "insert into test_e2e_mysql.tbl2 values ('doris_2_1',11)", - "insert into test_e2e_mysql.tbl3 values ('doris_3_1',12)", - "update test_e2e_mysql.tbl1 set age=18 where name='doris_1'", - "delete from test_e2e_mysql.tbl2 where name='doris_2'"); - Thread.sleep(20000); - - LOG.info("Start to verify incremental data result."); - List expected2 = - Arrays.asList( - "doris_1,18", "doris_1_1,10", "doris_2_1,11", "doris_3,3", "doris_3_1,12"); - String sql2 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 ) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected2, sql2, 2); - - // mock schema change - LOG.info("start to schema change in mysql."); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "alter table test_e2e_mysql.tbl1 add column c1 varchar(128)", - "alter table test_e2e_mysql.tbl1 drop column age"); - Thread.sleep(10000); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "insert into test_e2e_mysql.tbl1 values ('doris_1_1_1','c1_val')"); - Thread.sleep(20000); - LOG.info("verify tal1 schema change."); - List schemaChangeExpected = - Arrays.asList("doris_1,null", "doris_1_1,null", "doris_1_1_1,c1_val"); - String schemaChangeSql = "select * from test_e2e_mysql.tbl1 order by 1"; - ContainerUtils.checkResult( - getDorisQueryConnection(), LOG, schemaChangeExpected, schemaChangeSql, 2); - cancelE2EJob(jobName); - } - - @Test - public void testAutoAddTable() throws InterruptedException { - String jobName = "testAutoAddTable"; - initEnvironment(jobName, "container/e2e/mysql2doris/testAutoAddTable_init.sql"); - startMysql2DorisJob(jobName, "container/e2e/mysql2doris/testAutoAddTable.txt"); - - // wait 2 times checkpoint - Thread.sleep(20000); - LOG.info("Start to verify init result."); - List expected = Arrays.asList("doris_1,1", "doris_2,2", "doris_3,3", "doris_5,5"); - String sql1 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 union all select * from test_e2e_mysql.tbl5) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, sql1, 2); - - // auto add table - LOG.info("starting to create auto_add table."); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "CREATE TABLE test_e2e_mysql.auto_add ( \n" - + "`name` 
varchar(256) primary key,\n" - + "`age` int\n" - + ")", - "insert into test_e2e_mysql.auto_add values ('doris_4_1',4)", - "insert into test_e2e_mysql.auto_add values ('doris_4_2',4)"); - Thread.sleep(20000); - List autoAddResult = Arrays.asList("doris_4_1,4", "doris_4_2,4"); - String autoAddSql = "select * from test_e2e_mysql.auto_add order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, autoAddResult, autoAddSql, 2); - - // incremental data - LOG.info("starting to increment data."); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "insert into test_e2e_mysql.tbl1 values ('doris_1_1',10)", - "insert into test_e2e_mysql.tbl2 values ('doris_2_1',11)", - "insert into test_e2e_mysql.tbl3 values ('doris_3_1',12)", - "update test_e2e_mysql.tbl1 set age=18 where name='doris_1'", - "delete from test_e2e_mysql.tbl2 where name='doris_2'", - "insert into test_e2e_mysql.auto_add values ('doris_4_3',43)", - "delete from test_e2e_mysql.auto_add where name='doris_4_2'", - "update test_e2e_mysql.auto_add set age=41 where name='doris_4_1'"); - Thread.sleep(20000); - List incrementDataExpected = - Arrays.asList( - "doris_1,18", - "doris_1_1,10", - "doris_2_1,11", - "doris_3,3", - "doris_3_1,12", - "doris_4_1,41", - "doris_4_3,43"); - String incrementDataSql = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 union all select * from test_e2e_mysql.auto_add) res order by 1"; - ContainerUtils.checkResult( - getDorisQueryConnection(), LOG, incrementDataExpected, incrementDataSql, 2); - - // schema change - LOG.info("starting to mock schema change."); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "alter table test_e2e_mysql.auto_add add column c1 varchar(128)", - "alter table test_e2e_mysql.auto_add drop column age", - "insert into test_e2e_mysql.auto_add values ('doris_4_4','c1_val')"); - Thread.sleep(20000); - List schemaChangeExpected = - Arrays.asList("doris_4_1,null", "doris_4_3,null", "doris_4_4,c1_val"); - String schemaChangeSql = "select * from test_e2e_mysql.auto_add order by 1"; - ContainerUtils.checkResult( - getDorisQueryConnection(), LOG, schemaChangeExpected, schemaChangeSql, 2); - cancelE2EJob(jobName); - } - - @Test - public void testMySQL2DorisSQLParse() throws Exception { - String jobName = "testMySQL2DorisSQLParse"; - String resourcePath = "container/e2e/mysql2doris/testMySQL2DorisSQLParse.txt"; - initEnvironment(jobName, "container/e2e/mysql2doris/testMySQL2DorisSQLParse_init.sql"); - startMysql2DorisJob(jobName, resourcePath); - - // wait 2 times checkpoint - Thread.sleep(20000); - LOG.info("Start to verify init result."); - List expected = Arrays.asList("doris_1,1", "doris_2,2", "doris_3,3", "doris_5,5"); - String sql1 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 union all select * from test_e2e_mysql.tbl5) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, sql1, 2); - - // add incremental data - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "insert into test_e2e_mysql.tbl1 values ('doris_1_1',10)", - "insert into test_e2e_mysql.tbl2 values ('doris_2_1',11)", - "insert into test_e2e_mysql.tbl3 values ('doris_3_1',12)", - "update test_e2e_mysql.tbl1 set age=18 where name='doris_1'", - "delete from test_e2e_mysql.tbl2 where name='doris_2'"); - Thread.sleep(20000); - - 
LOG.info("Start to verify incremental data result."); - List expected2 = - Arrays.asList( - "doris_1,18", "doris_1_1,10", "doris_2_1,11", "doris_3,3", "doris_3_1,12"); - String sql2 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 ) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected2, sql2, 2); - - // mock schema change - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "alter table test_e2e_mysql.tbl1 add column c1 varchar(128)", - "alter table test_e2e_mysql.tbl1 drop column age"); - Thread.sleep(10000); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "insert into test_e2e_mysql.tbl1 values ('doris_1_1_1','c1_val')"); - Thread.sleep(20000); - LOG.info("verify tal1 schema change."); - List schemaChangeExpected = - Arrays.asList("doris_1,null", "doris_1_1,null", "doris_1_1_1,c1_val"); - String schemaChangeSql = "select * from test_e2e_mysql.tbl1 order by 1"; - ContainerUtils.checkResult( - getDorisQueryConnection(), LOG, schemaChangeExpected, schemaChangeSql, 2); - - // mock create table - LOG.info("start to create table in mysql."); - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "CREATE TABLE test_e2e_mysql.add_tbl (\n" - + " `name` varchar(256) primary key,\n" - + " `age` int\n" - + ");", - "insert into test_e2e_mysql.add_tbl values ('doris_1',1)", - "insert into test_e2e_mysql.add_tbl values ('doris_2',2)", - "insert into test_e2e_mysql.add_tbl values ('doris_3',3)"); - Thread.sleep(20000); - List createTableExpected = Arrays.asList("doris_1,1", "doris_2,2", "doris_3,3"); - String createTableSql = "select * from test_e2e_mysql.add_tbl order by 1"; - ContainerUtils.checkResult( - getDorisQueryConnection(), LOG, createTableExpected, createTableSql, 2); - cancelE2EJob(jobName); - } - - @Test - public void testMySQL2DorisByDefault() throws Exception { - String jobName = "testMySQL2DorisByDefault"; - initEnvironment(jobName, "container/e2e/mysql2doris/testMySQL2DorisByDefault_init.sql"); - startMysql2DorisJob(jobName, "container/e2e/mysql2doris/testMySQL2DorisByDefault.txt"); - - // wait 2 times checkpoint - Thread.sleep(20000); - LOG.info("Start to verify init result."); - List expected = Arrays.asList("doris_1,1", "doris_2,2", "doris_3,3", "doris_5,5"); - String sql1 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 union all select * from test_e2e_mysql.tbl5) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, sql1, 2); - - // add incremental data - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "insert into test_e2e_mysql.tbl1 values ('doris_1_1',10)", - "insert into test_e2e_mysql.tbl2 values ('doris_2_1',11)", - "insert into test_e2e_mysql.tbl3 values ('doris_3_1',12)", - "update test_e2e_mysql.tbl1 set age=18 where name='doris_1'", - "delete from test_e2e_mysql.tbl2 where name='doris_2'"); - Thread.sleep(20000); - - LOG.info("Start to verify incremental data result."); - List expected2 = - Arrays.asList( - "doris_1,18", "doris_1_1,10", "doris_2_1,11", "doris_3,3", "doris_3_1,12"); - String sql2 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 ) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected2, 
sql2, 2); - cancelE2EJob(jobName); - } - - @Test - public void testMySQL2DorisEnableDelete() throws Exception { - String jobName = "testMySQL2DorisEnableDelete"; - initEnvironment(jobName, "container/e2e/mysql2doris/testMySQL2DorisEnableDelete_init.sql"); - startMysql2DorisJob(jobName, "container/e2e/mysql2doris/testMySQL2DorisEnableDelete.txt"); - - // wait 2 times checkpoint - Thread.sleep(20000); - LOG.info("Start to verify init result."); - List initExpected = - Arrays.asList("doris_1,1", "doris_2,2", "doris_3,3", "doris_5,5"); - String sql1 = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 union all select * from test_e2e_mysql.tbl5) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, initExpected, sql1, 2); - - // add incremental data - ContainerUtils.executeSQLStatement( - getMySQLQueryConnection(), - LOG, - "insert into test_e2e_mysql.tbl1 values ('doris_1_1',10)", - "insert into test_e2e_mysql.tbl2 values ('doris_2_1',11)", - "insert into test_e2e_mysql.tbl3 values ('doris_3_1',12)", - "update test_e2e_mysql.tbl1 set age=18 where name='doris_1'", - "delete from test_e2e_mysql.tbl2 where name='doris_2'", - "delete from test_e2e_mysql.tbl3 where name='doris_3'", - "delete from test_e2e_mysql.tbl5 where name='doris_5'"); - - Thread.sleep(20000); - List expected = - Arrays.asList( - "doris_1,18", - "doris_1_1,10", - "doris_2,2", - "doris_2_1,11", - "doris_3,3", - "doris_3_1,12", - "doris_5,5"); - String sql = - "select * from ( select * from test_e2e_mysql.tbl1 union all select * from test_e2e_mysql.tbl2 union all select * from test_e2e_mysql.tbl3 union all select * from test_e2e_mysql.tbl5) res order by 1"; - ContainerUtils.checkResult(getDorisQueryConnection(), LOG, expected, sql, 2); - cancelE2EJob(jobName); - } - - @After - public void close() { - try { - // Ensure that semaphore is always released - } finally { - LOG.info("Mysql2DorisE2ECase releasing semaphore."); - SEMAPHORE.release(); - } - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/CDCSchemaChangeExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/CDCSchemaChangeExample.java deleted file mode 100644 index 8fe017e1c..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/CDCSchemaChangeExample.java +++ /dev/null @@ -1,97 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
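The removed Mysql2DorisE2ECase serializes itself against the other E2E cases with a shared Semaphore: acquire in @Before, release in @After, so only one Flink job talks to the shared containers at a time. A condensed JUnit 4 sketch of the pattern (test class and method names are invented):

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.util.concurrent.Semaphore;

public class SerializedCaseSketch {
    protected static final Semaphore SEMAPHORE = new Semaphore(1);

    @Before
    public void setUp() throws InterruptedException {
        SEMAPHORE.acquire();
    }

    @After
    public void close() {
        // Always release, even if the test body threw.
        SEMAPHORE.release();
    }

    @Test
    public void runsExclusively() {
        // test body that uses the shared containers
    }
}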
- -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.eventtime.WatermarkStrategy; -import org.apache.flink.cdc.connectors.mysql.source.MySqlSource; -import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.json.JsonConverterConfig; -import org.apache.flink.cdc.debezium.JsonDebeziumDeserializationSchema; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.cfg.DorisReadOptions; -import org.apache.doris.flink.sink.DorisSink; -import org.apache.doris.flink.sink.writer.serializer.JsonDebeziumSchemaSerializer; -import org.apache.doris.flink.utils.DateToStringConverter; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; -import java.util.UUID; - -public class CDCSchemaChangeExample { - - public static void main(String[] args) throws Exception { - - Map customConverterConfigs = new HashMap<>(); - customConverterConfigs.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "numeric"); - JsonDebeziumDeserializationSchema schema = - new JsonDebeziumDeserializationSchema(false, customConverterConfigs); - - MySqlSource mySqlSource = - MySqlSource.builder() - .hostname("127.0.0.1") - .port(3306) - .databaseList("test") // set captured database - .tableList("test.t1") // set captured table - .username("root") - .password("123456") - .debeziumProperties(DateToStringConverter.DEFAULT_PROPS) - .deserializer(schema) - .serverTimeZone("Asia/Shanghai") - .includeSchemaChanges(true) // converts SourceRecord to JSON String - .build(); - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - // enable checkpoint - env.enableCheckpointing(10000); - - Properties props = new Properties(); - props.setProperty("format", "json"); - props.setProperty("read_json_by_line", "true"); - DorisOptions dorisOptions = - DorisOptions.builder() - .setFenodes("127.0.0.1:8030") - .setTableIdentifier("test.t1") - .setUsername("root") - .setPassword("") - .build(); - - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - executionBuilder - .setLabelPrefix("label-doris" + UUID.randomUUID()) - .setStreamLoadProp(props) - .setDeletable(true); - - DorisSink.Builder builder = DorisSink.builder(); - builder.setDorisReadOptions(DorisReadOptions.builder().build()) - .setDorisExecutionOptions(executionBuilder.build()) - .setDorisOptions(dorisOptions) - .setSerializer( - JsonDebeziumSchemaSerializer.builder() - .setDorisOptions(dorisOptions) - .setNewSchemaChange(true) - .build()); - - env.fromSource(mySqlSource, WatermarkStrategy.noWatermarks(), "MySQL Source") // .print(); - .sinkTo(builder.build()); - - env.execute("Print MySQL Snapshot + Binlog"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/CatalogExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/CatalogExample.java deleted file mode 100644 index 3cdf30ec9..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/CatalogExample.java +++ /dev/null @@ -1,50 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. 
The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.Table; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.types.Row; - -public class CatalogExample { - - public static void main(String[] args) throws Exception { - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.setRuntimeMode(RuntimeExecutionMode.BATCH); - final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); - tEnv.executeSql( - "CREATE CATALOG doris_catalog WITH(\n" - + "'type' = 'doris',\n" - + "'default-database' = 'test',\n" - + "'username' = 'root',\n" - + "'password' = '',\n" - + "'fenodes' = '1127.0.0.1:8030',\n" - + "'jdbc-url' = 'jdbc:mysql://127.0.0.1:9030',\n" - + "'sink.label-prefix' = 'label'\n" - + ")"); - // define a dynamic aggregating query - final Table result = tEnv.sqlQuery("SELECT * from doris_catalog.test.type_test"); - - // print the result to the console - tEnv.toRetractStream(result, Row.class).print(); - env.execute(); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisDateAndTimestampSqlTest.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisDateAndTimestampSqlTest.java deleted file mode 100644 index f904ef44e..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisDateAndTimestampSqlTest.java +++ /dev/null @@ -1,71 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.example; - -import org.apache.flink.table.api.EnvironmentSettings; -import org.apache.flink.table.api.TableEnvironment; - -import java.util.UUID; - -public class DorisDateAndTimestampSqlTest { - - public static void main(String[] args) { - TableEnvironment tEnv = - TableEnvironment.create(EnvironmentSettings.newInstance().inBatchMode().build()); - tEnv.executeSql( - "create table test_source ( " - + " id INT, " - + " score DECIMAL(10, 9), " - + " submit_time TIMESTAMP " - + " ) with ( " - + " 'password'='', " - + " 'connector'='doris', " - + " 'fenodes'='FE_HOST:FE_PORT', " - + " 'table.identifier'='db.source_table', " - + " 'username'='root' " - + ")"); - - tEnv.executeSql( - "create table test_sink ( " - + " id INT, " - + " score DECIMAL(10, 9), " - + " submit_time DATE " - + " ) with ( " - + " 'password'='', " - + " 'connector'='doris', " - + " 'fenodes'='FE_HOST:FE_PORT', " - + " 'sink.label-prefix' = 'label_" - + UUID.randomUUID() - + "' , " - + " 'table.identifier'='db.sink_table', " - + " 'username'='root' " - + ")"); - tEnv.executeSql( - "insert into " - + " test_sink " - + "select " - + " id, " - + " score," - + " to_date(DATE_FORMAT(submit_time, 'yyyy-MM-dd')) as submit_time " - + "from " - + " test_source " - + "where " - + " submit_time>='2022-05-31 00:00:00'") - .print(); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisIntranetAccessSinkExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisIntranetAccessSinkExample.java deleted file mode 100644 index debc81406..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisIntranetAccessSinkExample.java +++ /dev/null @@ -1,111 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.api.common.functions.MapFunction; -import org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.api.common.time.Time; -import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.streaming.api.TimeCharacteristic; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.CheckpointConfig; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.cfg.DorisReadOptions; -import org.apache.doris.flink.sink.DorisSink; -import org.apache.doris.flink.sink.writer.serializer.SimpleStringSerializer; - -import java.util.ArrayList; -import java.util.List; -import java.util.Properties; - -/** - * When the flink connector accesses doris, it parses out all surviving BE nodes according to the FE - * address filled in. - * - *

<p>However, BE nodes are usually deployed with intranet IPs, so the BE - * addresses that FE resolves are intranet IPs. When Flink runs outside that intranet segment, - * those BE nodes are unreachable over the network. - - *
<p>
In this case, you can access the BE node on the intranet by directly configuring {@link new - * DorisOptions.builder().setBenodes().build()}, after you configure this parameter, Flink Connector - * will not parse all BE nodes through FE nodes. - */ -public class DorisIntranetAccessSinkExample { - - public static void main(String[] args) throws Exception { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); - env.setRuntimeMode(RuntimeExecutionMode.BATCH); - env.enableCheckpointing(10000); - env.getCheckpointConfig() - .enableExternalizedCheckpoints( - CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); - env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, Time.milliseconds(30000))); - - DorisSink.Builder builder = DorisSink.builder(); - final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder(); - readOptionBuilder - .setDeserializeArrowAsync(false) - .setDeserializeQueueSize(64) - .setExecMemLimit(2147483648L) - .setRequestQueryTimeoutS(3600) - .setRequestBatchSize(1000) - .setRequestConnectTimeoutMs(10000) - .setRequestReadTimeoutMs(10000) - .setRequestRetries(3) - .setRequestTabletSize(1024 * 1024); - - Properties properties = new Properties(); - properties.setProperty("column_separator", ","); - properties.setProperty("line_delimiter", "\n"); - properties.setProperty("format", "csv"); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setFenodes("10.20.30.1:8030") - .setBenodes("10.20.30.1:8040, 10.20.30.2:8040, 10.20.30.3:8040") - .setTableIdentifier("test.test_sink") - .setUsername("root") - .setPassword(""); - - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - executionBuilder - .disable2PC() - .setLabelPrefix("label-doris") - .setStreamLoadProp(properties) - .setBufferSize(8 * 1024) - .setBufferCount(3); - - builder.setDorisReadOptions(readOptionBuilder.build()) - .setDorisExecutionOptions(executionBuilder.build()) - .setSerializer(new SimpleStringSerializer()) - .setDorisOptions(dorisBuilder.build()); - - List> data = new ArrayList<>(); - data.add(new Tuple2<>(1, "zhangsan")); - data.add(new Tuple2<>(2, "lisi")); - data.add(new Tuple2<>(3, "wangwu")); - DataStreamSource> source = env.fromCollection(data); - source.map((MapFunction, String>) t -> t.f0 + "," + t.f1) - .sinkTo(builder.build()); - env.execute("doris test"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkArraySQLExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkArraySQLExample.java deleted file mode 100644 index 99514d328..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkArraySQLExample.java +++ /dev/null @@ -1,138 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; - -import java.util.UUID; - -public class DorisSinkArraySQLExample { - - public static void main(String[] args) throws Exception { - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.setRuntimeMode(RuntimeExecutionMode.BATCH); - final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); - tEnv.executeSql( - "CREATE TABLE source (\n" - + " `id` int,\n" - + " `c_1` array ,\n" - + " `c_2` array ,\n" - + " `c_3` array ,\n" - + " `c_4` array ,\n" - + " `c_5` array ,\n" - + " `c_6` array ,\n" - + " `c_7` array,\n" - + " `c_8` array ,\n" - + " `c_9` array ,\n" - + " `c_10` array ,\n" - + " `c_11` array ,\n" - + " `c_12` array ,\n" - + " `c_13` array ,\n" - + " `c_14` array ,\n" - + " `c_15` array ,\n" - + " `c_16` array \n" - + ") WITH (\n" - + " 'connector' = 'datagen', \n" - + " 'fields.c_7.element.min' = '1', \n" - + " 'fields.c_7.element.max' = '10', \n" - + " 'fields.c_8.element.min' = '1', \n" - + " 'fields.c_8.element.max' = '10', \n" - + " 'fields.c_14.element.length' = '10', \n" - + " 'fields.c_15.element.length' = '10', \n" - + " 'fields.c_16.element.length' = '10', \n" - + " 'number-of-rows' = '5' \n" - + ");"); - - tEnv.executeSql( - "CREATE TABLE source_doris (" - + " `id` int,\n" - + " `c_1` array ,\n" - + " `c_2` array ,\n" - + " `c_3` array ,\n" - + " `c_4` array ,\n" - + " `c_5` array ,\n" - + " `c_6` array ,\n" - + " `c_7` array ,\n" - + " `c_8` array ,\n" - + " `c_9` array ,\n" - + " `c_10` array ,\n" - + // ARRAY - " `c_11` array ,\n" - + // ARRAY - " `c_12` array ,\n" - + // ARRAY - " `c_13` array ,\n" - + // ARRAY - " `c_14` array ,\n" - + " `c_15` array ,\n" - + " `c_16` array \n" - + ") WITH (" - + " 'connector' = 'doris',\n" - + " 'fenodes' = '127.0.0.1:8030',\n" - + " 'table.identifier' = 'test.array_test_type',\n" - + " 'username' = 'root',\n" - + " 'password' = ''\n" - + ")"); - - // define a dynamic aggregating query - // final Table result = tEnv.sqlQuery("SELECT * from source_doris "); - - // print the result to the console - // tEnv.toRetractStream(result, Row.class).print(); - // env.execute(); - - tEnv.executeSql( - "CREATE TABLE sink (" - + " `id` int,\n" - + " `c_1` array ,\n" - + " `c_2` array ,\n" - + " `c_3` array ,\n" - + " `c_4` array ,\n" - + " `c_5` array ,\n" - + " `c_6` array ,\n" - + " `c_7` array ,\n" - + " `c_8` array ,\n" - + " `c_9` array ,\n" - + " `c_10` array ,\n" - + // ARRAY - " `c_11` array ,\n" - + // ARRAY - " `c_12` array ,\n" - + // ARRAY - " `c_13` array ,\n" - + // ARRAY - " `c_14` array ,\n" - + " `c_15` array ,\n" - + " `c_16` array \n" - + ") " - + "WITH (\n" - + " 'connector' = 'doris',\n" - + " 'fenodes' = '127.0.0.1:8030',\n" - + " 'table.identifier' = 'test.array_test_type_sink',\n" - + " 'username' = 'root',\n" - + " 'password' = '',\n" - + " 'sink.label-prefix' = 
'doris_label4" - + UUID.randomUUID() - + "'" - + ")"); - tEnv.executeSql("INSERT INTO sink select * from source_doris"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkBatchExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkBatchExample.java deleted file mode 100644 index 7a111b258..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkBatchExample.java +++ /dev/null @@ -1,161 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.streaming.api.functions.source.SourceFunction; - -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.cfg.DorisReadOptions; -import org.apache.doris.flink.sink.DorisSink; -import org.apache.doris.flink.sink.batch.DorisBatchSink; -import org.apache.doris.flink.sink.writer.WriteMode; -import org.apache.doris.flink.sink.writer.serializer.SimpleStringSerializer; - -import java.util.Arrays; -import java.util.Properties; -import java.util.UUID; - -public class DorisSinkBatchExample { - public static void main(String[] args) throws Exception { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - // env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); - env.enableCheckpointing(5000); - - // env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); - // env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, - // Time.milliseconds(30000))); - DorisSink.Builder builder = DorisSink.builder(); - final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder(); - readOptionBuilder - .setDeserializeArrowAsync(false) - .setDeserializeQueueSize(64) - .setExecMemLimit(2147483648L) - .setRequestQueryTimeoutS(3600) - .setRequestBatchSize(1000) - .setRequestConnectTimeoutMs(10000) - .setRequestReadTimeoutMs(10000) - .setRequestRetries(3) - .setRequestTabletSize(1024 * 1024); - Properties properties = new Properties(); - properties.setProperty("column_separator", ","); - properties.setProperty("line_delimiter", "\n"); - properties.setProperty("format", "csv"); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setFenodes("127.0.0.1:8030") - .setTableIdentifier("test.test_flink") - .setUsername("root") - .setPassword(""); - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - executionBuilder - 
.setLabelPrefix("label") - .setStreamLoadProp(properties) - .setDeletable(false) - .setBufferFlushMaxBytes(8 * 1024) - .setBufferFlushMaxRows(900) - .setBufferFlushIntervalMs(1000 * 10) - // .setBatchMode(true); - .setWriteMode(WriteMode.STREAM_LOAD_BATCH); - builder.setDorisReadOptions(readOptionBuilder.build()) - .setDorisExecutionOptions(executionBuilder.build()) - .setSerializer(new SimpleStringSerializer()) - .setDorisOptions(dorisBuilder.build()); - - env.addSource( - new SourceFunction() { - private Long id = 0L; - - @Override - public void run(SourceContext out) throws Exception { - while (true) { - id = id + 1; - String record = id + "," + UUID.randomUUID() + "," + id + ""; - out.collect(record); - Thread.sleep(500); - } - } - - @Override - public void cancel() {} - }) - .sinkTo(builder.build()); - - env.execute("doris batch test"); - } - - public void testBatchFlush() throws Exception { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - - DorisBatchSink.Builder builder = DorisBatchSink.builder(); - final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder(); - - readOptionBuilder - .setDeserializeArrowAsync(false) - .setDeserializeQueueSize(64) - .setExecMemLimit(2147483648L) - .setRequestQueryTimeoutS(3600) - .setRequestBatchSize(1000) - .setRequestConnectTimeoutMs(10000) - .setRequestReadTimeoutMs(10000) - .setRequestRetries(3) - .setRequestTabletSize(1024 * 1024); - - Properties properties = new Properties(); - properties.setProperty("column_separator", ","); - properties.setProperty("line_delimiter", "\n"); - properties.setProperty("format", "csv"); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setFenodes("127.0.0.1:8030") - .setTableIdentifier("test.testd") - .setUsername("root") - .setPassword(""); - - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - - executionBuilder - .setLabelPrefix("label") - .setStreamLoadProp(properties) - .setDeletable(false) - .setBufferFlushMaxBytes(8 * 1024) - .setBufferFlushMaxRows(1) - .setBufferFlushIntervalMs(1000 * 10); - - builder.setDorisReadOptions(readOptionBuilder.build()) - .setDorisExecutionOptions(executionBuilder.build()) - .setSerializer(new SimpleStringSerializer()) - .setDorisOptions(dorisBuilder.build()); - - DataStreamSource stringDataStreamSource = - env.fromCollection( - Arrays.asList( - "1,-74159.9193252453", - "2,-74159.9193252453", - "3,-19.7004480979", - "4,43385.2170333507", - "5,-16.2602598554")); - stringDataStreamSource.sinkTo(builder.build()); - - env.execute("doris batch test"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExample.java deleted file mode 100644 index 35ef73fd0..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExample.java +++ /dev/null @@ -1,92 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.api.common.functions.MapFunction; -import org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.api.common.time.Time; -import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.streaming.api.TimeCharacteristic; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.CheckpointConfig; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.cfg.DorisReadOptions; -import org.apache.doris.flink.sink.DorisSink; -import org.apache.doris.flink.sink.writer.serializer.SimpleStringSerializer; - -import java.util.ArrayList; -import java.util.List; -import java.util.Properties; - -public class DorisSinkExample { - - public static void main(String[] args) throws Exception { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); - env.setRuntimeMode(RuntimeExecutionMode.BATCH); - env.enableCheckpointing(10000); - env.getCheckpointConfig() - .enableExternalizedCheckpoints( - CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); - env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, Time.milliseconds(30000))); - DorisSink.Builder builder = DorisSink.builder(); - final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder(); - readOptionBuilder - .setDeserializeArrowAsync(false) - .setDeserializeQueueSize(64) - .setExecMemLimit(2147483648L) - .setRequestQueryTimeoutS(3600) - .setRequestBatchSize(1000) - .setRequestConnectTimeoutMs(10000) - .setRequestReadTimeoutMs(10000) - .setRequestRetries(3) - .setRequestTabletSize(1024 * 1024); - Properties properties = new Properties(); - properties.setProperty("column_separator", ","); - properties.setProperty("line_delimiter", "\n"); - properties.setProperty("format", "csv"); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setFenodes("127.0.0.1:8040") - .setTableIdentifier("db.table") - .setUsername("test") - .setPassword("test"); - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - executionBuilder - .setLabelPrefix("label-doris") - .setStreamLoadProp(properties) - .setBufferSize(8 * 1024) - .setBufferCount(3); - - builder.setDorisReadOptions(readOptionBuilder.build()) - .setDorisExecutionOptions(executionBuilder.build()) - .setSerializer(new SimpleStringSerializer()) - .setDorisOptions(dorisBuilder.build()); - - List> data = new ArrayList<>(); - data.add(new Tuple2<>("doris", 1)); - DataStreamSource> source = env.fromCollection(data); - source.map((MapFunction, String>) t -> t.f0 + "," + t.f1) - .sinkTo(builder.build()); - env.execute("doris test"); - } -} diff --git 
a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExampleRowData.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExampleRowData.java deleted file mode 100644 index 8037e2eaa..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkExampleRowData.java +++ /dev/null @@ -1,111 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.api.common.functions.FlatMapFunction; -import org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.api.common.time.Time; -import org.apache.flink.streaming.api.TimeCharacteristic; -import org.apache.flink.streaming.api.datastream.DataStream; -import org.apache.flink.streaming.api.environment.CheckpointConfig; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.DataTypes; -import org.apache.flink.table.data.GenericRowData; -import org.apache.flink.table.data.RowData; -import org.apache.flink.table.data.StringData; -import org.apache.flink.table.types.DataType; -import org.apache.flink.util.Collector; - -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.sink.DorisSink; -import org.apache.doris.flink.sink.writer.LoadConstants; -import org.apache.doris.flink.sink.writer.serializer.RowDataSerializer; - -import java.util.Properties; -import java.util.UUID; - -public class DorisSinkExampleRowData { - - public static void main(String[] args) throws Exception { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); - env.setRuntimeMode(RuntimeExecutionMode.BATCH); - env.enableCheckpointing(10000); - env.setParallelism(1); - env.getCheckpointConfig() - .enableExternalizedCheckpoints( - CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); - env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, Time.milliseconds(30000))); - DorisSink.Builder builder = DorisSink.builder(); - - Properties properties = new Properties(); - properties.setProperty("column_separator", ","); - properties.setProperty("line_delimiter", "\n"); - // properties.setProperty("read_json_by_line", "true"); - // properties.setProperty("format", "json"); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setFenodes("127.0.0.1:8030") - .setTableIdentifier("db.tbl") - .setUsername("root") - .setPassword(""); - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - 
executionBuilder.setLabelPrefix(UUID.randomUUID().toString()).setStreamLoadProp(properties); - - // flink rowdata‘s schema - String[] fields = {"name", "age"}; - DataType[] types = {DataTypes.VARCHAR(256), DataTypes.INT()}; - - builder.setDorisExecutionOptions(executionBuilder.build()) - .setSerializer( - RowDataSerializer.builder() // serialize according to rowdata - .setType(LoadConstants.CSV) // .setType(LoadConstants.CSV) - .setFieldDelimiter(",") - .setFieldNames(fields) // .setFieldDelimiter(",") - .setFieldType(types) - .build()) - .setDorisOptions(dorisBuilder.build()); - - // mock rowdata source - DataStream source = - env.fromElements("") - .flatMap( - new FlatMapFunction() { - @Override - public void flatMap(String s, Collector out) - throws Exception { - GenericRowData genericRowData = new GenericRowData(2); - genericRowData.setField( - 0, StringData.fromString("beijing")); - genericRowData.setField(1, 123); - out.collect(genericRowData); - - GenericRowData genericRowData2 = new GenericRowData(2); - genericRowData2.setField( - 0, StringData.fromString("shanghai")); - genericRowData2.setField(1, 1234); - out.collect(genericRowData2); - } - }); - - source.sinkTo(builder.build()); - env.execute("doris test"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkMultiTableExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkMultiTableExample.java deleted file mode 100644 index feff8b326..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkMultiTableExample.java +++ /dev/null @@ -1,115 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.example; - -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.streaming.api.functions.source.SourceFunction; - -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.cfg.DorisReadOptions; -import org.apache.doris.flink.sink.batch.DorisBatchSink; -import org.apache.doris.flink.sink.batch.RecordWithMeta; -import org.apache.doris.flink.sink.writer.serializer.RecordWithMetaSerializer; - -import java.util.Properties; -import java.util.UUID; - -public class DorisSinkMultiTableExample { - public static void main(String[] args) throws Exception { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - - DorisBatchSink.Builder builder = DorisBatchSink.builder(); - final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder(); - - readOptionBuilder - .setDeserializeArrowAsync(false) - .setDeserializeQueueSize(64) - .setExecMemLimit(2147483648L) - .setRequestQueryTimeoutS(3600) - .setRequestBatchSize(1000) - .setRequestConnectTimeoutMs(10000) - .setRequestReadTimeoutMs(10000) - .setRequestRetries(3) - .setRequestTabletSize(1024 * 1024); - - Properties properties = new Properties(); - properties.setProperty("column_separator", ","); - properties.setProperty("line_delimiter", "\n"); - properties.setProperty("format", "csv"); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setFenodes("127.0.0.1:8030") - .setTableIdentifier("test.test_flink_tmp") - .setUsername("root") - .setPassword(""); - - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - - executionBuilder - .setLabelPrefix("label") - .setStreamLoadProp(properties) - .setDeletable(false) - .setBufferFlushMaxBytes(8 * 1024) - .setBufferFlushMaxRows(10) - .setBufferFlushIntervalMs(1000 * 10); - - builder.setDorisReadOptions(readOptionBuilder.build()) - .setDorisExecutionOptions(executionBuilder.build()) - .setDorisOptions(dorisBuilder.build()) - .setSerializer(new RecordWithMetaSerializer()); - - // RecordWithMeta record = new RecordWithMeta("test", "test_flink_tmp1", "wangwu,1"); - // RecordWithMeta record1 = new RecordWithMeta("test", "test_flink_tmp", "wangwu,1"); - // DataStreamSource stringDataStreamSource = env.fromCollection( - // Arrays.asList(record, record1)); - // stringDataStreamSource.sinkTo(builder.build()); - - env.addSource( - new SourceFunction() { - private Long id = 1000000L; - - @Override - public void run(SourceContext out) throws Exception { - while (true) { - id = id + 1; - RecordWithMeta record = - new RecordWithMeta( - "test", - "test_flink_tmp1", - UUID.randomUUID() + ",1"); - out.collect(record); - record = - new RecordWithMeta( - "test", - "test_flink_tmp", - UUID.randomUUID() + ",1"); - out.collect(record); - Thread.sleep(3000); - } - } - - @Override - public void cancel() {} - }) - .sinkTo(builder.build()); - - env.execute("doris multi table test"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkSQLExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkSQLExample.java deleted file mode 100644 index a31ab8a55..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkSQLExample.java +++ /dev/null @@ -1,63 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor 
license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; - -import java.util.ArrayList; -import java.util.List; - -import static org.apache.flink.table.api.Expressions.$; - -public class DorisSinkSQLExample { - - public static void main(String[] args) { - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.setRuntimeMode(RuntimeExecutionMode.BATCH); - final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); - - List> data = new ArrayList<>(); - data.add(new Tuple2<>("doris", 1)); - DataStreamSource> source = env.fromCollection(data); - tEnv.createTemporaryView("doris_test", source, $("name"), $("age")); - - tEnv.executeSql( - "CREATE TABLE doris_test_sink (" - + "name STRING," - + "age INT" - + ") " - + "WITH (\n" - + " 'connector' = 'doris',\n" - + " 'fenodes' = 'FE_IP:8030',\n" - + " 'table.identifier' = 'db.table',\n" - + " 'username' = 'root',\n" - + " 'password' = '',\n" - + " 'sink.properties.format' = 'json',\n" - + " 'sink.buffer-count' = '4',\n" - + " 'sink.buffer-size' = '4086'," - + " 'sink.label-prefix' = 'doris_label',\n" - + " 'sink.properties.read_json_by_line' = 'true'\n" - + ")"); - tEnv.executeSql("INSERT INTO doris_test_sink select name,age from doris_test"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkStreamMultiTableExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkStreamMultiTableExample.java deleted file mode 100644 index e2b8dae9f..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSinkStreamMultiTableExample.java +++ /dev/null @@ -1,120 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.restartstrategy.RestartStrategies; -import org.apache.flink.api.common.time.Time; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.environment.CheckpointConfig; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction; - -import org.apache.doris.flink.cfg.DorisExecutionOptions; -import org.apache.doris.flink.cfg.DorisOptions; -import org.apache.doris.flink.cfg.DorisReadOptions; -import org.apache.doris.flink.sink.DorisSink; -import org.apache.doris.flink.sink.batch.RecordWithMeta; -import org.apache.doris.flink.sink.writer.serializer.RecordWithMetaSerializer; - -import java.util.Properties; -import java.util.UUID; - -public class DorisSinkStreamMultiTableExample { - public static void main(String[] args) throws Exception { - Configuration config = new Configuration(); - // config.setString("execution.savepoint.path","/tmp/checkpoint/chk-6"); - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(config); - env.setParallelism(1); - env.getCheckpointConfig().setCheckpointStorage("file:///tmp/checkpoint/"); - env.getCheckpointConfig() - .enableExternalizedCheckpoints( - CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); - env.setRestartStrategy(RestartStrategies.fixedDelayRestart(5, Time.milliseconds(10000))); - env.enableCheckpointing(10000); - DorisSink.Builder builder = DorisSink.builder(); - final DorisReadOptions.Builder readOptionBuilder = DorisReadOptions.builder(); - Properties properties = new Properties(); - properties.setProperty("column_separator", ","); - properties.setProperty("line_delimiter", "\n"); - properties.setProperty("format", "csv"); - DorisOptions.Builder dorisBuilder = DorisOptions.builder(); - dorisBuilder - .setFenodes("127.0.0.1:8030") - .setTableIdentifier("") - .setUsername("root") - .setPassword(""); - - DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder(); - executionBuilder - .setLabelPrefix("xxx12") - .setStreamLoadProp(properties) - .setDeletable(false) - .enable2PC(); - - builder.setDorisReadOptions(readOptionBuilder.build()) - .setDorisExecutionOptions(executionBuilder.build()) - .setDorisOptions(dorisBuilder.build()) - .setSerializer(new RecordWithMetaSerializer()); - - // RecordWithMeta record = new RecordWithMeta("test", "test_flink_tmp1", "wangwu,1"); - // RecordWithMeta record1 = new RecordWithMeta("test", "test_flink_tmp", "wangwu,1"); - // DataStreamSource stringDataStreamSource = env.fromCollection( - // Arrays.asList(record, record1)); - // stringDataStreamSource.sinkTo(builder.build()); - - env.addSource( - new ParallelSourceFunction() { - private Long id = 1000000L; - - @Override - public void run(SourceContext out) throws Exception { - while (true) { - id = id + 1; - RecordWithMeta record = - new RecordWithMeta( - "test", - "test_flink_a", - UUID.randomUUID() + ",1"); - out.collect(record); - record = - new RecordWithMeta( - "test", - "test_flink_b", - UUID.randomUUID() + ",2"); - out.collect(record); - if (id > 100) { - // mock dynamic add table - RecordWithMeta record3 = - new RecordWithMeta( - "test", - "test_flink_c", - UUID.randomUUID() + ",1"); - out.collect(record3); - } - Thread.sleep(3000); - } - } - - @Override - public void 
cancel() {} - }) - .sinkTo(builder.build()); - - env.execute("doris stream multi table test"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceDataStream.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceDataStream.java deleted file mode 100644 index ee3fa1357..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceDataStream.java +++ /dev/null @@ -1,46 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.cfg.DorisStreamOptions; -import org.apache.doris.flink.datastream.DorisSourceFunction; -import org.apache.doris.flink.deserialization.SimpleListDeserializationSchema; - -import java.util.Properties; - -public class DorisSourceDataStream { - - public static void main(String[] args) throws Exception { - Properties properties = new Properties(); - properties.put("fenodes", "FE_IP:8030"); - properties.put("username", "root"); - properties.put("password", ""); - properties.put("table.identifier", "db.table"); - properties.put("doris.read.field", "id,code,name"); - properties.put("doris.filter.query", "name='doris'"); - DorisStreamOptions options = new DorisStreamOptions(properties); - - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(2); - env.addSource(new DorisSourceFunction(options, new SimpleListDeserializationSchema())) - .print(); - env.execute("Flink doris test"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceExample.java deleted file mode 100644 index 90e461cfc..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceExample.java +++ /dev/null @@ -1,64 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.Table; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; - -public class DorisSourceExample { - - public static void main(String[] args) throws Exception { - - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - - final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); - - // register a table in the catalog - tEnv.executeSql( - "CREATE TABLE doris_source (" - + "bigint_1 BIGINT," - + "char_1 STRING," - + "date_1 STRING," - + "datetime_1 STRING," - + "decimal_1 DECIMAL(5,2)," - + "double_1 DOUBLE," - + "float_1 FLOAT ," - + "int_1 INT ," - + "largeint_1 STRING, " - + "smallint_1 SMALLINT, " - + "tinyint_1 TINYINT, " - + "varchar_1 STRING " - + ") " - + "WITH (\n" - + " 'connector' = 'doris',\n" - + " 'fenodes' = 'FE_IP:8030',\n" - + " 'table.identifier' = 'db.table',\n" - + " 'username' = 'root',\n" - + " 'password' = ''\n" - + ")"); - - // define a dynamic aggregating query - final Table result = tEnv.sqlQuery("SELECT * from doris_source "); - - // print the result to the console - tEnv.toDataStream(result).print(); - env.execute(); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceSinkExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceSinkExample.java deleted file mode 100644 index d64a39673..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/DorisSourceSinkExample.java +++ /dev/null @@ -1,145 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.example; - -import org.apache.flink.api.common.RuntimeExecutionMode; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.Table; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.types.Row; - -import java.util.UUID; - -public class DorisSourceSinkExample { - - public static void main(String[] args) throws Exception { - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.setRuntimeMode(RuntimeExecutionMode.BATCH); - final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); - tEnv.executeSql( - "CREATE TABLE doris_test (" - + " id int,\n" - + " c_1 boolean,\n" - + " c_2 tinyint,\n" - + " c_3 smallint,\n" - + " c_4 int,\n" - + " c_5 bigint,\n" - + " c_6 bigint,\n" - + " c_7 float,\n" - + " c_8 double,\n" - + " c_9 DECIMAL(4,2),\n" - + " c_10 DECIMAL(4,1),\n" - + " c_11 date,\n" - + " c_12 date,\n" - + " c_13 timestamp,\n" - + " c_14 timestamp,\n" - + " c_15 string,\n" - + " c_16 string,\n" - + " c_17 string,\n" - + " c_18 array,\n" - + " c_19 Map\n" - + ") " - + "WITH (\n" - + " 'connector' = 'datagen', \n" - + " 'fields.c_6.max' = '5', \n" - + " 'fields.c_9.max' = '5', \n" - + " 'fields.c_10.max' = '5', \n" - + " 'fields.c_15.length' = '5', \n" - + " 'fields.c_16.length' = '5', \n" - + " 'fields.c_17.length' = '5', \n" - + " 'fields.c_19.key.length' = '5', \n" - + " 'connector' = 'datagen', \n" - + " 'number-of-rows' = '1' \n" - + ")"); - - final Table result = tEnv.sqlQuery("SELECT * from doris_test "); - - // print the result to the console - tEnv.toRetractStream(result, Row.class).print(); - env.execute(); - - tEnv.executeSql( - "CREATE TABLE source_doris (" - + " id int,\n" - + " c_1 boolean,\n" - + " c_2 tinyint,\n" - + " c_3 smallint,\n" - + " c_4 int,\n" - + " c_5 bigint,\n" - + " c_6 string,\n" - + " c_7 float,\n" - + " c_8 double,\n" - + " c_9 DECIMAL(4,2),\n" - + " c_10 DECIMAL(4,1),\n" - + " c_11 date,\n" - + " c_12 date,\n" - + " c_13 timestamp,\n" - + " c_14 timestamp,\n" - + " c_15 string,\n" - + " c_16 string,\n" - + " c_17 string,\n" - + " c_18 array,\n" - + " c_19 string\n" - + ") " - + "WITH (\n" - + " 'connector' = 'doris',\n" - + " 'fenodes' = '127.0.0.1:8030',\n" - + " 'table.identifier' = 'test.test_all_type',\n" - + " 'username' = 'root',\n" - + " 'password' = ''\n" - + ")"); - - tEnv.executeSql( - "CREATE TABLE doris_test_sink (" - + " id int,\n" - + " c_1 boolean,\n" - + " c_2 tinyint,\n" - + " c_3 smallint,\n" - + " c_4 int,\n" - + " c_5 bigint,\n" - + " c_6 string,\n" - + " c_7 float,\n" - + " c_8 double,\n" - + " c_9 DECIMAL(4,2),\n" - + " c_10 DECIMAL(4,1),\n" - + " c_11 date,\n" - + " c_12 date,\n" - + " c_13 timestamp,\n" - + " c_14 timestamp,\n" - + " c_15 string,\n" - + " c_16 string,\n" - + " c_17 string,\n" - + " c_18 array,\n" - + " c_19 string\n" - + ") " - + "WITH (\n" - + " 'connector' = 'doris',\n" - + " 'fenodes' = '127.0.0.1:8030',\n" - + " 'table.identifier' = 'test.test_all_type_sink',\n" - + " 'username' = 'root',\n" - + " 'password' = '',\n" - + " 'sink.properties.format' = 'csv',\n" - + " 'sink.label-prefix' = 'doris_label4" - + UUID.randomUUID() - + "'" - + ")"); - - tEnv.executeSql("INSERT INTO doris_test_sink select * from source_doris"); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinCdcExample.java 
b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinCdcExample.java deleted file mode 100644 index f972d6e27..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinCdcExample.java +++ /dev/null @@ -1,83 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.table.api.Table; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.types.Row; - -public class LookupJoinCdcExample { - public static void main(String[] args) throws Exception { - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - // env.disableOperatorChaining(); - env.enableCheckpointing(30000); - final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); - - tEnv.executeSql( - "CREATE TABLE mysql_tb (" - + "id INT," - + "name STRING," - + "process_time as proctime()," - + "primary key(id) NOT ENFORCED" - + ") " - + "WITH (\n" - + " 'connector' = 'mysql-cdc',\n" - + " 'hostname' = '127.0.0.1',\n" - + " 'port' = '3306',\n" - + " 'username' = 'root',\n" - + " 'password' = '123456',\n" - + " 'database-name' = 'test',\n" - + " 'scan.startup.mode' = 'latest-offset',\n" - + " 'server-time-zone' = 'Asia/Shanghai',\n" - + " 'table-name' = 'fact_table' " - + ")"); - - tEnv.executeSql( - "CREATE TABLE doris_tb (" - + "id INT," - + "age INT," - + "dt DATE," - + "dtime TIMESTAMP," - + "primary key(id) NOT ENFORCED" - + ") " - + "WITH (\n" - + " 'connector' = 'doris',\n" - + " 'fenodes' = '127.0.0.1:8030',\n" - + " 'jdbc-url' = 'jdbc:mysql://127.0.0.1:9030',\n" - + " 'table.identifier' = 'test.dim_table_dt',\n" - + " 'lookup.cache.max-rows' = '1000'," - + " 'lookup.cache.ttl' = '1 hour'," - + " 'lookup.jdbc.async' = 'true',\n" - + " 'username' = 'root',\n" - + " 'password' = ''\n" - + ")"); - - Table table = - tEnv.sqlQuery( - "SELECT a.id, a.name, b.age, b.dt, b.dtime\n" - + "FROM mysql_tb a\n" - + " left join doris_tb FOR SYSTEM_TIME AS OF a.process_time AS b\n" - + " ON a.id = b.id"); - - tEnv.toRetractStream(table, Row.class).print(); - - env.execute(); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinExample.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinExample.java deleted file mode 100644 index 9e82c6d41..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/example/LookupJoinExample.java +++ /dev/null @@ -1,93 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. 
See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.example; - -import org.apache.flink.api.java.tuple.Tuple2; -import org.apache.flink.streaming.api.datastream.DataStreamSource; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; -import org.apache.flink.streaming.api.functions.source.SourceFunction; -import org.apache.flink.table.api.Table; -import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; -import org.apache.flink.types.Row; - -import java.util.UUID; - -import static org.apache.flink.table.api.Expressions.$; - -public class LookupJoinExample { - - public static void main(String[] args) throws Exception { - final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.disableOperatorChaining(); - env.enableCheckpointing(30000); - final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env); - - DataStreamSource> source = - env.addSource( - new SourceFunction>() { - private Integer id = 1; - - @Override - public void run(SourceContext> out) - throws Exception { - while (true) { - Tuple2 record = - new Tuple2<>(id++, UUID.randomUUID().toString()); - out.collect(record); - Thread.sleep(1000); - } - } - - @Override - public void cancel() {} - }); - tEnv.createTemporaryView( - "doris_source", source, $("id"), $("uuid"), $("process_time").proctime()); - - tEnv.executeSql( - "CREATE TABLE lookup_dim_tbl (" - + " c_custkey int," - + " c_name string," - + " c_address string," - + " c_city string," - + " c_nation string," - + " c_region string," - + " c_phone string," - + " c_mktsegment string" - + ") " - + "WITH (\n" - + " 'connector' = 'doris',\n" - + " 'fenodes' = '127.0.0.1:8030',\n" - + " 'jdbc-url' = 'jdbc:mysql://127.0.0.1:9030',\n" - + " 'table.identifier' = 'ssb.customer',\n" - + " 'lookup.jdbc.async' = 'true',\n" - + " 'username' = 'root',\n" - + " 'password' = ''\n" - + ")"); - - Table table = - tEnv.sqlQuery( - "select a.id,a.uuid,b.c_name,b.c_nation,b.c_phone from doris_source a " - + "left join lookup_dim_tbl FOR SYSTEM_TIME AS OF a.process_time b " - + "ON a.id = b.c_custkey"); - - tEnv.toRetractStream(table, Row.class).print(); - env.execute(); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcDb2SyncDatabaseCase.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcDb2SyncDatabaseCase.java deleted file mode 100644 index 055365570..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcDb2SyncDatabaseCase.java +++ /dev/null @@ -1,100 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. 
The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions; -import org.apache.flink.cdc.connectors.base.options.SourceOptions; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.table.DorisConfigOptions; -import org.apache.doris.flink.tools.cdc.db2.Db2DatabaseSync; - -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -public class CdcDb2SyncDatabaseCase { - - public static void main(String[] args) throws Exception { - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.disableOperatorChaining(); - env.enableCheckpointing(10000); - - String database = "db2_test"; - String tablePrefix = ""; - String tableSuffix = ""; - Map sourceConfig = new HashMap<>(); - sourceConfig.put(JdbcSourceOptions.DATABASE_NAME.key(), "testdb"); - sourceConfig.put(JdbcSourceOptions.SCHEMA_NAME.key(), "DB2INST1"); - sourceConfig.put(JdbcSourceOptions.HOSTNAME.key(), "127.0.0.1"); - sourceConfig.put(Db2DatabaseSync.PORT.key(), "50000"); - sourceConfig.put(JdbcSourceOptions.USERNAME.key(), "db2inst1"); - sourceConfig.put(JdbcSourceOptions.PASSWORD.key(), "=doris123456"); - sourceConfig.put(SourceOptions.SCAN_INCREMENTAL_SNAPSHOT_ENABLED.key(), "true"); - // add jdbc properties configuration - sourceConfig.put("jdbc.properties.allowNextOnExhaustedResultSet", "1"); - sourceConfig.put("jdbc.properties.resultSetHoldability", "1"); - sourceConfig.put("jdbc.properties.SSL", "false"); - - Configuration config = Configuration.fromMap(sourceConfig); - - Map sinkConfig = new HashMap<>(); - sinkConfig.put(DorisConfigOptions.FENODES.key(), "127.0.0.1:8030"); - sinkConfig.put(DorisConfigOptions.USERNAME.key(), "root"); - sinkConfig.put(DorisConfigOptions.PASSWORD.key(), "123456"); - sinkConfig.put(DorisConfigOptions.JDBC_URL.key(), "jdbc:mysql://127.0.0.1:9030"); - sinkConfig.put(DorisConfigOptions.SINK_LABEL_PREFIX.key(), UUID.randomUUID().toString()); - Configuration sinkConf = Configuration.fromMap(sinkConfig); - - Map tableConfig = new HashMap<>(); - tableConfig.put(DorisTableConfig.REPLICATION_NUM, "1"); - tableConfig.put(DorisTableConfig.TABLE_BUCKETS, "tbl1:10,tbl2:20,a.*:30,b.*:40,.*:50"); - String includingTables = "FULL_TYPES"; - String excludingTables = null; - String multiToOneOrigin = null; - String multiToOneTarget = null; - boolean ignoreDefaultValue = false; - boolean useNewSchemaChange = true; - boolean singleSink = false; - boolean ignoreIncompatible = false; - DatabaseSync databaseSync = new Db2DatabaseSync(); - databaseSync - .setEnv(env) - .setDatabase(database) - .setConfig(config) - .setTablePrefix(tablePrefix) - .setTableSuffix(tableSuffix) - .setIncludingTables(includingTables) - .setExcludingTables(excludingTables) - .setMultiToOneOrigin(multiToOneOrigin) - 
.setMultiToOneTarget(multiToOneTarget) - .setIgnoreDefaultValue(ignoreDefaultValue) - .setSinkConfig(sinkConf) - .setTableConfig(tableConfig) - .setCreateTableOnly(false) - .setNewSchemaChange(useNewSchemaChange) - .setSingleSink(singleSink) - .setIgnoreIncompatible(ignoreIncompatible) - .create(); - databaseSync.build(); - env.execute(String.format("DB2-Doris Database Sync: %s", database)); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMongoSyncDatabaseCase.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMongoSyncDatabaseCase.java deleted file mode 100644 index ffc8a75d8..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMongoSyncDatabaseCase.java +++ /dev/null @@ -1,97 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.cdc.connectors.base.options.SourceOptions; -import org.apache.flink.cdc.connectors.mongodb.source.config.MongoDBSourceOptions; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.table.DorisConfigOptions; -import org.apache.doris.flink.tools.cdc.mongodb.MongoDBDatabaseSync; - -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -public class CdcMongoSyncDatabaseCase { - public static void main(String[] args) throws Exception { - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - Map flinkMap = new HashMap<>(); - flinkMap.put("execution.checkpointing.interval", "10s"); - flinkMap.put("pipeline.operator-chaining", "false"); - flinkMap.put("parallelism.default", "8"); - - String database = "cdc_test"; - String tablePrefix = ""; - String tableSuffix = ""; - - Configuration configuration = Configuration.fromMap(flinkMap); - env.configure(configuration); - Map mongoConfig = new HashMap<>(); - mongoConfig.put(MongoDBSourceOptions.DATABASE.key(), "test"); - mongoConfig.put(MongoDBSourceOptions.HOSTS.key(), "127.0.0.1:27017"); - mongoConfig.put(MongoDBSourceOptions.USERNAME.key(), "flinkuser"); - mongoConfig.put(MongoDBSourceOptions.PASSWORD.key(), "flinkpwd"); - // mongoConfig.put(SourceOptions.SCAN_STARTUP_MODE.key(), - // DorisCDCConfig.SCAN_STARTUP_MODE_VALUE_LATEST_OFFSET); - mongoConfig.put( - SourceOptions.SCAN_STARTUP_MODE.key(), - DatabaseSyncConfig.SCAN_STARTUP_MODE_VALUE_INITIAL); - mongoConfig.put("schema.sample-percent", "1"); - Configuration config = Configuration.fromMap(mongoConfig); - - Map sinkConfig = new HashMap<>(); - sinkConfig.put(DorisConfigOptions.FENODES.key(), "127.0.0.1:8030"); - sinkConfig.put(DorisConfigOptions.USERNAME.key(), "root"); - 
sinkConfig.put(DorisConfigOptions.PASSWORD.key(), ""); - sinkConfig.put(DorisConfigOptions.JDBC_URL.key(), "jdbc:mysql://127.0.0.1:9030"); - sinkConfig.put(DorisConfigOptions.SINK_LABEL_PREFIX.key(), UUID.randomUUID().toString()); - sinkConfig.put(DorisConfigOptions.AUTO_REDIRECT.key(), "false"); - // sinkConfig.put(DorisConfigOptions.SINK_ENABLE_BATCH_MODE.key(),"true"); - // sinkConfig.put(DorisConfigOptions.SINK_WRITE_MODE.key(),"stream_load_batch"); - Configuration sinkConf = Configuration.fromMap(sinkConfig); - - Map tableConfig = new HashMap<>(); - tableConfig.put(DorisTableConfig.REPLICATION_NUM, "1"); - tableConfig.put(DorisTableConfig.TABLE_BUCKETS, ".*:1"); - String includingTables = "cdc_test"; - String excludingTables = ""; - String multiToOneOrigin = "a_.*|b_.*"; - String multiToOneTarget = "a|b"; - boolean ignoreDefaultValue = false; - DatabaseSync databaseSync = new MongoDBDatabaseSync(); - databaseSync - .setEnv(env) - .setDatabase(database) - .setConfig(config) - .setTablePrefix(tablePrefix) - .setTableSuffix(tableSuffix) - .setIncludingTables(includingTables) - .setExcludingTables(excludingTables) - .setMultiToOneOrigin(multiToOneOrigin) - .setMultiToOneTarget(multiToOneTarget) - .setIgnoreDefaultValue(ignoreDefaultValue) - .setSinkConfig(sinkConf) - .setTableConfig(tableConfig) - .setCreateTableOnly(false) - .create(); - databaseSync.build(); - env.execute(String.format("Mongo-Doris Database Sync: %s", database)); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMysqlSyncDatabaseCase.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMysqlSyncDatabaseCase.java deleted file mode 100644 index e85e888fc..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcMysqlSyncDatabaseCase.java +++ /dev/null @@ -1,105 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.cdc.connectors.mysql.source.config.MySqlSourceOptions; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.sink.schema.SchemaChangeMode; -import org.apache.doris.flink.table.DorisConfigOptions; -import org.apache.doris.flink.tools.cdc.mysql.MysqlDatabaseSync; - -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -public class CdcMysqlSyncDatabaseCase { - - public static void main(String[] args) throws Exception { - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - // env.setParallelism(1); - - Map flinkMap = new HashMap<>(); - flinkMap.put("execution.checkpointing.interval", "10s"); - flinkMap.put("pipeline.operator-chaining", "false"); - flinkMap.put("parallelism.default", "1"); - - Configuration configuration = Configuration.fromMap(flinkMap); - env.configure(configuration); - - String database = "db1"; - String tablePrefix = ""; - String tableSuffix = ""; - Map mysqlConfig = new HashMap<>(); - mysqlConfig.put(MySqlSourceOptions.DATABASE_NAME.key(), "test"); - mysqlConfig.put(MySqlSourceOptions.HOSTNAME.key(), "127.0.0.1"); - mysqlConfig.put(MySqlSourceOptions.PORT.key(), "3306"); - mysqlConfig.put(MySqlSourceOptions.USERNAME.key(), "root"); - mysqlConfig.put(MySqlSourceOptions.PASSWORD.key(), "12345678"); - // add jdbc properties for MySQL - mysqlConfig.put("jdbc.properties.use_ssl", "false"); - Configuration config = Configuration.fromMap(mysqlConfig); - - Map sinkConfig = new HashMap<>(); - sinkConfig.put(DorisConfigOptions.FENODES.key(), "10.20.30.1:8030"); - sinkConfig.put(DorisConfigOptions.USERNAME.key(), "root"); - sinkConfig.put(DorisConfigOptions.PASSWORD.key(), ""); - sinkConfig.put(DorisConfigOptions.JDBC_URL.key(), "jdbc:mysql://10.20.30.1:9030"); - sinkConfig.put(DorisConfigOptions.SINK_LABEL_PREFIX.key(), UUID.randomUUID().toString()); - sinkConfig.put("sink.enable-delete", "false"); - Configuration sinkConf = Configuration.fromMap(sinkConfig); - - Map tableConfig = new HashMap<>(); - tableConfig.put(DorisTableConfig.REPLICATION_NUM, "1"); - tableConfig.put(DorisTableConfig.TABLE_BUCKETS, "tbl1:10,tbl2:20,a.*:30,b.*:40,.*:50"); - // String includingTables = "tbl1|tbl2|tbl3"; - String includingTables = "a_.*|b_.*|c"; - String excludingTables = ""; - String multiToOneOrigin = "a_.*|b_.*"; - String multiToOneTarget = "a|b"; - boolean ignoreDefaultValue = false; - boolean useNewSchemaChange = true; - String schemaChangeMode = SchemaChangeMode.DEBEZIUM_STRUCTURE.getName(); - boolean singleSink = false; - boolean ignoreIncompatible = false; - DatabaseSync databaseSync = new MysqlDatabaseSync(); - databaseSync - .setEnv(env) - .setDatabase(database) - .setConfig(config) - .setTablePrefix(tablePrefix) - .setTableSuffix(tableSuffix) - .setIncludingTables(includingTables) - .setExcludingTables(excludingTables) - .setMultiToOneOrigin(multiToOneOrigin) - .setMultiToOneTarget(multiToOneTarget) - .setIgnoreDefaultValue(ignoreDefaultValue) - .setSinkConfig(sinkConf) - .setTableConfig(tableConfig) - .setCreateTableOnly(false) - .setNewSchemaChange(useNewSchemaChange) - .setSchemaChangeMode(schemaChangeMode) - .setSingleSink(singleSink) - .setIgnoreIncompatible(ignoreIncompatible) - .create(); - databaseSync.build(); - env.execute(String.format("MySQL-Doris Database Sync: %s", database)); - } -} diff --git 
a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcOraclelSyncDatabaseCase.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcOraclelSyncDatabaseCase.java deleted file mode 100644 index 92600ffd6..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcOraclelSyncDatabaseCase.java +++ /dev/null @@ -1,95 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.cdc.connectors.oracle.source.config.OracleSourceOptions; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.table.DorisConfigOptions; -import org.apache.doris.flink.tools.cdc.oracle.OracleDatabaseSync; - -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -public class CdcOraclelSyncDatabaseCase { - - public static void main(String[] args) throws Exception { - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.disableOperatorChaining(); - env.enableCheckpointing(10000); - - String database = "db1"; - String tablePrefix = ""; - String tableSuffix = ""; - Map sourceConfig = new HashMap<>(); - sourceConfig.put(OracleSourceOptions.DATABASE_NAME.key(), "XE"); - sourceConfig.put(OracleSourceOptions.SCHEMA_NAME.key(), "ADMIN"); - sourceConfig.put(OracleSourceOptions.HOSTNAME.key(), "127.0.0.1"); - sourceConfig.put(OracleSourceOptions.PORT.key(), "1521"); - sourceConfig.put(OracleSourceOptions.USERNAME.key(), "admin"); - sourceConfig.put(OracleSourceOptions.PASSWORD.key(), ""); - // sourceConfig.put("debezium.database.tablename.case.insensitive","false"); - sourceConfig.put("debezium.log.mining.strategy", "online_catalog"); - sourceConfig.put("debezium.log.mining.continuous.mine", "true"); - sourceConfig.put("debezium.database.history.store.only.captured.tables.ddl", "true"); - Configuration config = Configuration.fromMap(sourceConfig); - - Map sinkConfig = new HashMap<>(); - sinkConfig.put(DorisConfigOptions.FENODES.key(), "10.20.30.1:8030"); - sinkConfig.put(DorisConfigOptions.USERNAME.key(), "root"); - sinkConfig.put(DorisConfigOptions.PASSWORD.key(), ""); - sinkConfig.put(DorisConfigOptions.JDBC_URL.key(), "jdbc:mysql://10.20.30.1:9030"); - sinkConfig.put(DorisConfigOptions.SINK_LABEL_PREFIX.key(), UUID.randomUUID().toString()); - Configuration sinkConf = Configuration.fromMap(sinkConfig); - - Map tableConfig = new HashMap<>(); - tableConfig.put(DorisTableConfig.REPLICATION_NUM, "1"); - tableConfig.put(DorisTableConfig.TABLE_BUCKETS, "tbl1:10,tbl2:20,a.*:30,b.*:40,.*:50"); - String includingTables = "a_.*|b_.*|c"; - String 
excludingTables = ""; - String multiToOneOrigin = "a_.*|b_.*"; - String multiToOneTarget = "a|b"; - boolean ignoreDefaultValue = false; - boolean useNewSchemaChange = true; - boolean ignoreIncompatible = false; - DatabaseSync databaseSync = new OracleDatabaseSync(); - databaseSync - .setEnv(env) - .setDatabase(database) - .setConfig(config) - .setTablePrefix(tablePrefix) - .setTableSuffix(tableSuffix) - .setIncludingTables(includingTables) - .setExcludingTables(excludingTables) - .setMultiToOneOrigin(multiToOneOrigin) - .setMultiToOneTarget(multiToOneTarget) - .setIgnoreDefaultValue(ignoreDefaultValue) - .setSinkConfig(sinkConf) - .setTableConfig(tableConfig) - .setCreateTableOnly(false) - .setNewSchemaChange(useNewSchemaChange) - .setIgnoreIncompatible(ignoreIncompatible) - .create(); - databaseSync.build(); - env.execute(String.format("Oracle-Doris Database Sync: %s", database)); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcPostgresSyncDatabaseCase.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcPostgresSyncDatabaseCase.java deleted file mode 100644 index 331840117..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcPostgresSyncDatabaseCase.java +++ /dev/null @@ -1,99 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.cdc.connectors.postgres.source.config.PostgresSourceOptions; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.table.DorisConfigOptions; -import org.apache.doris.flink.tools.cdc.postgres.PostgresDatabaseSync; - -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -public class CdcPostgresSyncDatabaseCase { - - public static void main(String[] args) throws Exception { - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.disableOperatorChaining(); - env.enableCheckpointing(10000); - - String database = "db2"; - String tablePrefix = ""; - String tableSuffix = ""; - Map sourceConfig = new HashMap<>(); - sourceConfig.put(PostgresSourceOptions.DATABASE_NAME.key(), "postgres"); - sourceConfig.put(PostgresSourceOptions.SCHEMA_NAME.key(), "public"); - sourceConfig.put(PostgresSourceOptions.SLOT_NAME.key(), "test"); - sourceConfig.put(PostgresSourceOptions.DECODING_PLUGIN_NAME.key(), "pgoutput"); - sourceConfig.put(PostgresSourceOptions.HOSTNAME.key(), "127.0.0.1"); - sourceConfig.put(PostgresSourceOptions.PG_PORT.key(), "5432"); - sourceConfig.put(PostgresSourceOptions.USERNAME.key(), "postgres"); - sourceConfig.put(PostgresSourceOptions.PASSWORD.key(), "123456"); - // add jdbc properties configuration - sourceConfig.put("jdbc.properties.ssl", "false"); - // sourceConfig.put("debezium.database.tablename.case.insensitive","false"); - // sourceConfig.put("scan.incremental.snapshot.enabled","true"); - // sourceConfig.put("debezium.include.schema.changes","false"); - - Configuration config = Configuration.fromMap(sourceConfig); - - Map sinkConfig = new HashMap<>(); - sinkConfig.put(DorisConfigOptions.FENODES.key(), "10.20.30.1:8030"); - sinkConfig.put(DorisConfigOptions.USERNAME.key(), "root"); - sinkConfig.put(DorisConfigOptions.PASSWORD.key(), ""); - sinkConfig.put(DorisConfigOptions.JDBC_URL.key(), "jdbc:mysql://10.20.30.1:9030"); - sinkConfig.put(DorisConfigOptions.SINK_LABEL_PREFIX.key(), UUID.randomUUID().toString()); - Configuration sinkConf = Configuration.fromMap(sinkConfig); - - Map tableConfig = new HashMap<>(); - tableConfig.put(DorisTableConfig.REPLICATION_NUM, "1"); - tableConfig.put(DorisTableConfig.TABLE_BUCKETS, "tbl1:10,tbl2:20,a.*:30,b.*:40,.*:50"); - String includingTables = "a_.*|b_.*|c"; - String excludingTables = ""; - String multiToOneOrigin = "a_.*|b_.*"; - String multiToOneTarget = "a|b"; - boolean ignoreDefaultValue = false; - boolean useNewSchemaChange = true; - boolean ignoreIncompatible = false; - DatabaseSync databaseSync = new PostgresDatabaseSync(); - databaseSync - .setEnv(env) - .setDatabase(database) - .setConfig(config) - .setTablePrefix(tablePrefix) - .setTableSuffix(tableSuffix) - .setIncludingTables(includingTables) - .setExcludingTables(excludingTables) - .setMultiToOneOrigin(multiToOneOrigin) - .setMultiToOneTarget(multiToOneTarget) - .setIgnoreDefaultValue(ignoreDefaultValue) - .setSinkConfig(sinkConf) - .setTableConfig(tableConfig) - .setCreateTableOnly(false) - .setNewSchemaChange(useNewSchemaChange) - .setIgnoreIncompatible(ignoreIncompatible) - .create(); - databaseSync.build(); - env.execute(String.format("Postgres-Doris Database Sync: %s", database)); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcSqlServerSyncDatabaseCase.java 
b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcSqlServerSyncDatabaseCase.java deleted file mode 100644 index 7a1cf276a..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcSqlServerSyncDatabaseCase.java +++ /dev/null @@ -1,98 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.cdc.connectors.base.options.JdbcSourceOptions; -import org.apache.flink.configuration.Configuration; -import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; - -import org.apache.doris.flink.table.DorisConfigOptions; -import org.apache.doris.flink.tools.cdc.sqlserver.SqlServerDatabaseSync; - -import java.util.HashMap; -import java.util.Map; -import java.util.UUID; - -public class CdcSqlServerSyncDatabaseCase { - - public static void main(String[] args) throws Exception { - - StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); - env.setParallelism(1); - env.disableOperatorChaining(); - env.enableCheckpointing(10000); - - String database = "db2"; - String tablePrefix = ""; - String tableSuffix = ""; - Map sourceConfig = new HashMap<>(); - sourceConfig.put(JdbcSourceOptions.DATABASE_NAME.key(), "CDC_DB"); - sourceConfig.put(JdbcSourceOptions.SCHEMA_NAME.key(), "dbo"); - sourceConfig.put(JdbcSourceOptions.HOSTNAME.key(), "127.0.0.1"); - sourceConfig.put(DatabaseSyncConfig.PORT, "1433"); - sourceConfig.put(JdbcSourceOptions.USERNAME.key(), "sa"); - sourceConfig.put(JdbcSourceOptions.PASSWORD.key(), "Passw@rd"); - // add jdbc properties configuration - sourceConfig.put("jdbc.properties.encrypt", "false"); - sourceConfig.put("jdbc.properties.integratedSecurity", "false"); - // sourceConfig.put("debezium.database.tablename.case.insensitive","false"); - // sourceConfig.put("scan.incremental.snapshot.enabled","true"); - // sourceConfig.put("debezium.include.schema.changes","false"); - - Configuration config = Configuration.fromMap(sourceConfig); - - Map sinkConfig = new HashMap<>(); - sinkConfig.put(DorisConfigOptions.FENODES.key(), "10.20.30.1:8030"); - sinkConfig.put(DorisConfigOptions.USERNAME.key(), "root"); - sinkConfig.put(DorisConfigOptions.PASSWORD.key(), ""); - sinkConfig.put(DorisConfigOptions.JDBC_URL.key(), "jdbc:mysql://10.20.30.1:9030"); - sinkConfig.put(DorisConfigOptions.SINK_LABEL_PREFIX.key(), UUID.randomUUID().toString()); - Configuration sinkConf = Configuration.fromMap(sinkConfig); - - Map tableConfig = new HashMap<>(); - tableConfig.put(DorisTableConfig.REPLICATION_NUM, "1"); - tableConfig.put(DorisTableConfig.TABLE_BUCKETS, "tbl1:10,tbl2:20,a.*:30,b.*:40,.*:50"); - String includingTables = "a_.*|b_.*|c"; - String excludingTables = ""; - String multiToOneOrigin = 
"a_.*|b_.*"; - String multiToOneTarget = "a|b"; - boolean ignoreDefaultValue = false; - boolean useNewSchemaChange = true; - boolean ignoreIncompatible = false; - DatabaseSync databaseSync = new SqlServerDatabaseSync(); - databaseSync - .setEnv(env) - .setDatabase(database) - .setConfig(config) - .setTablePrefix(tablePrefix) - .setTableSuffix(tableSuffix) - .setIncludingTables(includingTables) - .setExcludingTables(excludingTables) - .setMultiToOneOrigin(multiToOneOrigin) - .setMultiToOneTarget(multiToOneTarget) - .setIgnoreDefaultValue(ignoreDefaultValue) - .setSinkConfig(sinkConf) - .setTableConfig(tableConfig) - .setCreateTableOnly(false) - .setNewSchemaChange(useNewSchemaChange) - .setIgnoreIncompatible(ignoreIncompatible) - .create(); - databaseSync.build(); - env.execute(String.format("SqlServer-Doris Database Sync: %s", database)); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcToolsTest.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcToolsTest.java deleted file mode 100644 index 41bcb6213..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/CdcToolsTest.java +++ /dev/null @@ -1,118 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.api.java.utils.MultipleParameterTool; - -import org.junit.Assert; -import org.junit.Test; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; - -public class CdcToolsTest { - - @Test - public void getConfigMapTest() { - MultipleParameterTool params = - MultipleParameterTool.fromArgs( - new String[] { - "--sink-conf", - "fenodes = 127.0.0.1:8030", - "--sink-conf", - "password=", - "--sink-conf", - "jdbc-url= jdbc:mysql://127.0.0.1:9030 ", - "--sink-conf", - "sink.label-prefix = label " - }); - Map<String, String> sinkConf = CdcTools.getConfigMap(params, DatabaseSyncConfig.SINK_CONF); - - Map<String, String> excepted = new HashMap<>(); - excepted.put("password", ""); - excepted.put("fenodes", "127.0.0.1:8030"); - excepted.put("jdbc-url", "jdbc:mysql://127.0.0.1:9030"); - excepted.put("sink.label-prefix", "label"); - Assert.assertEquals(sinkConf, excepted); - - Map<String, String> mysqlConf = - CdcTools.getConfigMap(params, DatabaseSyncConfig.MYSQL_CONF); - Assert.assertNull(mysqlConf); - - MultipleParameterTool params2 = - MultipleParameterTool.fromArgs(new String[] {"--sink-conf", "fenodes"}); - Map<String, String> sinkConf2 = - CdcTools.getConfigMap(params2, DatabaseSyncConfig.SINGLE_SINK); - Assert.assertNull(sinkConf2); - } - - @Test - public void testGetConfigMap() { - Map<String, Collection<String>> config = new HashMap<>(); - config.put( - DatabaseSyncConfig.MYSQL_CONF, Arrays.asList(" hostname=127.0.0.1", " port=3306")); - config.put( - DatabaseSyncConfig.POSTGRES_CONF, - Arrays.asList("hostname=127.0.0.1 ", "port=5432 ")); - config.put( - DatabaseSyncConfig.SINK_CONF, - Arrays.asList(" fenodes=127.0.0.1:8030 ", " username=root")); - config.put(DatabaseSyncConfig.TABLE_CONF, Collections.singletonList(" replication_num=1")); - MultipleParameterTool parameter = MultipleParameterTool.fromMultiMap(config); - Map<String, String> mysqlConfigMap = - CdcTools.getConfigMap(parameter, DatabaseSyncConfig.MYSQL_CONF); - Map<String, String> postGresConfigMap = - CdcTools.getConfigMap(parameter, DatabaseSyncConfig.POSTGRES_CONF); - Map<String, String> sinkConfigMap = - CdcTools.getConfigMap(parameter, DatabaseSyncConfig.SINK_CONF); - Map<String, String> tableConfigMap = - CdcTools.getConfigMap(parameter, DatabaseSyncConfig.TABLE_CONF); - - Set<String> mysqlKeyConf = new HashSet<>(Arrays.asList("hostname", "port")); - Set<String> mysqlValueConf = new HashSet<>(Arrays.asList("127.0.0.1", "3306")); - assertEquals(mysqlConfigMap, mysqlKeyConf, mysqlValueConf); - - Set<String> postgresKeyConf = new HashSet<>(Arrays.asList("hostname", "port")); - Set<String> postgresValueConf = new HashSet<>(Arrays.asList("127.0.0.1", "5432")); - assertEquals(postGresConfigMap, postgresKeyConf, postgresValueConf); - - Set<String> sinkKeyConf = new HashSet<>(Arrays.asList("fenodes", "username")); - Set<String> sinkValueConf = new HashSet<>(Arrays.asList("127.0.0.1:8030", "root")); - assertEquals(sinkConfigMap, sinkKeyConf, sinkValueConf); - - Set<String> tableKeyConf = new HashSet<>(Collections.singletonList("replication_num")); - Set<String> tableValueConf = new HashSet<>(Collections.singletonList("1")); - assertEquals(tableConfigMap, tableKeyConf, tableValueConf); - } - - private void assertEquals( - Map<String, String> actualMap, Set<String> keyConf, Set<String> valueConf) { - for (Entry<String, String> entry : actualMap.entrySet()) { - String key = entry.getKey(); - String value = entry.getValue(); - Assert.assertTrue(keyConf.contains(key)); - Assert.assertTrue(valueConf.contains(value)); - } - } -} diff --git
a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/DatabaseSyncTest.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/DatabaseSyncTest.java deleted file mode 100644 index 859a87208..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/DatabaseSyncTest.java +++ /dev/null @@ -1,269 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package org.apache.doris.flink.tools.cdc; - -import org.apache.flink.configuration.Configuration; - -import org.apache.doris.flink.catalog.doris.TableSchema; -import org.apache.doris.flink.tools.cdc.db2.Db2DatabaseSync; -import org.apache.doris.flink.tools.cdc.mysql.MysqlDatabaseSync; -import org.apache.doris.flink.tools.cdc.postgres.PostgresDatabaseSync; -import org.apache.doris.flink.tools.cdc.sqlserver.SqlServerDatabaseSync; -import org.jetbrains.annotations.NotNull; -import org.junit.Assert; -import org.junit.Test; - -import java.sql.SQLException; -import java.util.Arrays; -import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; -import java.util.Set; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - -/** Unit tests for the {@link DatabaseSync}. 
*/ -public class DatabaseSyncTest { - @Test - public void multiToOneRulesParserTest() throws Exception { - String[][] testCase = { - {"a_.*|b_.*", "a|b"} // Normal condition - // ,{"a_.*|b_.*","a|b|c"} // Unequal length - // ,{"",""} // Null value - // ,{"***....","a"} // Abnormal regular expression - }; - DatabaseSync databaseSync = new MysqlDatabaseSync(); - Arrays.stream(testCase) - .forEach( - arr -> { - databaseSync.multiToOneRulesParser(arr[0], arr[1]); - }); - } - - @Test - public void getSyncTableListTest() throws Exception { - DatabaseSync databaseSync = new MysqlDatabaseSync(); - databaseSync.setSingleSink(false); - databaseSync.setIncludingTables("tbl_1|tbl_2"); - Configuration config = new Configuration(); - config.setString("database-name", "db"); - config.setString("table-name", "tbl.*"); - databaseSync.setConfig(config); - String syncTableList = databaseSync.getSyncTableList(Arrays.asList("tbl_1", "tbl_2")); - assertEquals("(db)\\.(tbl_1|tbl_2)", syncTableList); - } - - @Test - public void getTableBucketsTest() throws SQLException { - String tableBuckets = "tbl1:10,tbl2 : 20, a.* :30,b.*:40,.*:50"; - DatabaseSync databaseSync = new MysqlDatabaseSync(); - Map tableBucketsMap = DatabaseSync.getTableBuckets(tableBuckets); - assertEquals(10, tableBucketsMap.get("tbl1").intValue()); - assertEquals(20, tableBucketsMap.get("tbl2").intValue()); - assertEquals(30, tableBucketsMap.get("a.*").intValue()); - assertEquals(40, tableBucketsMap.get("b.*").intValue()); - assertEquals(50, tableBucketsMap.get(".*").intValue()); - } - - @Test - public void setTableSchemaBucketsTest() throws SQLException { - DatabaseSync databaseSync = new MysqlDatabaseSync(); - String tableSchemaBuckets = "tbl1:10,tbl2:20,a11.*:30,a1.*:40,b.*:50,b1.*:60,.*:70"; - Map tableBucketsMap = DatabaseSync.getTableBuckets(tableSchemaBuckets); - List tableList = - Arrays.asList( - "tbl1", "tbl2", "tbl3", "a11", "a111", "a12", "a13", "b1", "b11", "b2", - "c1", "d1"); - HashMap matchedTableBucketsMap = mockTableBuckets(); - Set tableSet = new HashSet<>(); - tableList.forEach( - tableName -> { - TableSchema tableSchema = new TableSchema(); - tableSchema.setTable(tableName); - databaseSync.setTableSchemaBuckets( - tableBucketsMap, tableSchema, tableName, tableSet); - assertEquals( - matchedTableBucketsMap.get(tableName), tableSchema.getTableBuckets()); - }); - } - - @Test - public void setTableSchemaBucketsTest1() throws SQLException { - DatabaseSync databaseSync = new MysqlDatabaseSync(); - String tableSchemaBuckets = ".*:10,a.*:20,tbl:30,b.*:40"; - Map tableBucketsMap = DatabaseSync.getTableBuckets(tableSchemaBuckets); - List tableList = Arrays.asList("a1", "a2", "a3", "b1", "a"); - HashMap matchedTableBucketsMap = mockTableBuckets1(); - Set tableSet = new HashSet<>(); - tableList.forEach( - tableName -> { - TableSchema tableSchema = new TableSchema(); - tableSchema.setTable(tableName); - databaseSync.setTableSchemaBuckets( - tableBucketsMap, tableSchema, tableName, tableSet); - assertEquals( - matchedTableBucketsMap.get(tableName), tableSchema.getTableBuckets()); - }); - } - - @NotNull - private static HashMap mockTableBuckets() { - HashMap matchedTableBucketsMap = new HashMap<>(); - matchedTableBucketsMap.put("tbl1", 10); - matchedTableBucketsMap.put("tbl2", 20); - matchedTableBucketsMap.put("a11", 30); - matchedTableBucketsMap.put("a111", 30); - matchedTableBucketsMap.put("a12", 40); - matchedTableBucketsMap.put("a13", 40); - matchedTableBucketsMap.put("b1", 50); - matchedTableBucketsMap.put("b11", 50); - 
matchedTableBucketsMap.put("b2", 50); - matchedTableBucketsMap.put("c1", 70); - matchedTableBucketsMap.put("d1", 70); - matchedTableBucketsMap.put("tbl3", 70); - return matchedTableBucketsMap; - } - - @NotNull - private static HashMap mockTableBuckets1() { - HashMap matchedTableBucketsMap = new HashMap<>(); - matchedTableBucketsMap.put("a", 10); - matchedTableBucketsMap.put("a1", 10); - matchedTableBucketsMap.put("a2", 10); - matchedTableBucketsMap.put("a3", 10); - matchedTableBucketsMap.put("b1", 10); - matchedTableBucketsMap.put("tbl1", 10); - return matchedTableBucketsMap; - } - - @Test - public void singleSinkTablePatternTest() throws SQLException { - DatabaseSync databaseSync = new MysqlDatabaseSync(); - databaseSync.setSingleSink(true); - databaseSync.setIncludingTables(".*"); - databaseSync.setExcludingTables("customer|dates|lineorder"); - Configuration config = new Configuration(); - config.setString("database-name", "ssb_test"); - databaseSync.setConfig(config); - List tableList = - Arrays.asList("customer", "dates", "lineorder", "test1", "test2", "test3"); - String syncTableListPattern = databaseSync.getSyncTableList(tableList); - assertTrue("ssb_test.test1".matches(syncTableListPattern)); - assertTrue("ssb_test.test2".matches(syncTableListPattern)); - assertTrue("ssb_test.test3".matches(syncTableListPattern)); - assertFalse("ssb_test.customer".matches(syncTableListPattern)); - assertFalse("ssb_test.dates".matches(syncTableListPattern)); - assertFalse("ssb_test.lineorder".matches(syncTableListPattern)); - } - - @Test - public void getJdbcPropertiesTest() throws Exception { - DatabaseSync databaseSync = new MysqlDatabaseSync(); - Map mysqlConfig = new HashMap<>(); - mysqlConfig.put("jdbc.properties.use_ssl", "false"); - - Configuration config = Configuration.fromMap(mysqlConfig); - databaseSync.setConfig(config); - Properties jdbcProperties = databaseSync.getJdbcProperties(); - Assert.assertEquals(1, jdbcProperties.size()); - Assert.assertEquals("false", jdbcProperties.getProperty("use_ssl")); - } - - @Test - public void getJdbcUrlTemplateTest() throws SQLException { - String mysqlJdbcTemplate = "jdbc:mysql://%s:%d?useInformationSchema=true"; - String postgresJdbcTemplate = "jdbc:postgresql://%s:%d/%s?"; - String sqlServerJdbcTemplate = "jdbc:sqlserver://%s:%d;database=%s;"; - String db2JdbcTemplate = "jdbc:db2://%s:%d/%s"; - - // mysql jdbc properties configuration - DatabaseSync mysqlDatabaseSync = new MysqlDatabaseSync(); - Map mysqlJdbcConfig = new LinkedHashMap<>(); - mysqlJdbcConfig.put("jdbc.properties.use_ssl", "false"); - - DatabaseSync postgresDatabaseSync = new PostgresDatabaseSync(); - Map postgresJdbcConfig = new LinkedHashMap<>(); - postgresJdbcConfig.put("jdbc.properties.ssl", "false"); - - DatabaseSync sqlServerDatabaseSync = new SqlServerDatabaseSync(); - Map sqlServerJdbcConfig = new LinkedHashMap<>(); - sqlServerJdbcConfig.put("jdbc.properties.encrypt", "false"); - sqlServerJdbcConfig.put("jdbc.properties.integratedSecurity", "false"); - - DatabaseSync db2DatabaseSync = new Db2DatabaseSync(); - Map db2JdbcConfig = new LinkedHashMap<>(); - db2JdbcConfig.put("jdbc.properties.ssl", "false"); - db2JdbcConfig.put("jdbc.properties.allowNextOnExhaustedResultSet", "1"); - db2JdbcConfig.put("jdbc.properties.resultSetHoldability", "1"); - - Configuration mysqlConfig = Configuration.fromMap(mysqlJdbcConfig); - mysqlDatabaseSync.setConfig(mysqlConfig); - - Configuration postgresConfig = Configuration.fromMap(postgresJdbcConfig); - 
postgresDatabaseSync.setConfig(postgresConfig); - - Configuration sqlServerConfig = Configuration.fromMap(sqlServerJdbcConfig); - sqlServerDatabaseSync.setConfig(sqlServerConfig); - - Configuration db2Config = Configuration.fromMap(db2JdbcConfig); - db2DatabaseSync.setConfig(db2Config); - - Properties mysqlJdbcProperties = mysqlDatabaseSync.getJdbcProperties(); - Assert.assertEquals(1, mysqlJdbcProperties.size()); - Assert.assertEquals("false", mysqlJdbcProperties.getProperty("use_ssl")); - String mysqlJdbcUrlTemplate = - mysqlDatabaseSync.getJdbcUrlTemplate(mysqlJdbcTemplate, mysqlJdbcProperties); - Assert.assertEquals(mysqlJdbcTemplate + "&use_ssl=false", mysqlJdbcUrlTemplate); - - Properties postgresJdbcProperties = postgresDatabaseSync.getJdbcProperties(); - Assert.assertEquals(1, postgresJdbcProperties.size()); - Assert.assertEquals("false", postgresJdbcProperties.getProperty("ssl")); - String postgresJdbcUrlTemplate = - postgresDatabaseSync.getJdbcUrlTemplate( - postgresJdbcTemplate, postgresJdbcProperties); - Assert.assertEquals(postgresJdbcTemplate + "&ssl=false", postgresJdbcUrlTemplate); - - Properties sqlServerJdbcProperties = sqlServerDatabaseSync.getJdbcProperties(); - Assert.assertEquals(2, sqlServerJdbcProperties.size()); - Assert.assertEquals("false", sqlServerJdbcProperties.getProperty("encrypt")); - Assert.assertEquals("false", sqlServerJdbcProperties.getProperty("integratedSecurity")); - String sqlServerJdbcUrlTemplate = - sqlServerDatabaseSync.getJdbcUrlTemplate( - sqlServerJdbcTemplate, sqlServerJdbcProperties); - Assert.assertEquals( - sqlServerJdbcTemplate + "encrypt=false;integratedSecurity=false;", - sqlServerJdbcUrlTemplate); - - Properties db2JdbcProperties = db2DatabaseSync.getJdbcProperties(); - Assert.assertEquals(3, db2JdbcProperties.size()); - Assert.assertEquals("false", db2JdbcProperties.getProperty("ssl")); - Assert.assertEquals("1", db2JdbcProperties.getProperty("allowNextOnExhaustedResultSet")); - Assert.assertEquals("1", db2JdbcProperties.getProperty("resultSetHoldability")); - String db2JdbcUrlTemplate = - db2DatabaseSync.getJdbcUrlTemplate(db2JdbcTemplate, db2JdbcProperties); - Assert.assertEquals( - db2JdbcTemplate - + ":allowNextOnExhaustedResultSet=1;ssl=false;resultSetHoldability=1;", - db2JdbcUrlTemplate); - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchemaTest.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchemaTest.java deleted file mode 100644 index a9cc8bbc3..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBSchemaTest.java +++ /dev/null @@ -1,159 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -package org.apache.doris.flink.tools.cdc.mongodb; - -import org.apache.doris.flink.catalog.doris.FieldSchema; -import org.bson.Document; -import org.bson.types.Decimal128; -import org.junit.Test; - -import java.math.BigDecimal; -import java.util.ArrayList; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -public class MongoDBSchemaTest { - - @Test - public void convertToDorisType() {} - - @Test - public void getCdcTableName() throws Exception { - MongoDBSchema mongoDBSchema = - new MongoDBSchema(new ArrayList<>(), "db_TEST", "test_table", ""); - assertEquals("db_TEST\\.test_table", mongoDBSchema.getCdcTableName()); - } - - @Test - public void replaceDecimalTypeIfNeededTest1() throws Exception { - ArrayList<Document> documents = new ArrayList<>(); - documents.add(new Document("fields1", 1234567.666666)); - documents.add(new Document("fields1", 123456789.88888888)); - - MongoDBSchema mongoDBSchema = new MongoDBSchema(documents, "db_TEST", "test_table", ""); - Map<String, FieldSchema> fields = mongoDBSchema.getFields(); - for (Map.Entry<String, FieldSchema> entry : fields.entrySet()) { - FieldSchema fieldSchema = entry.getValue(); - String fieldName = entry.getKey(); - if (fieldName.equals("fields1")) { - assertEquals("DECIMAL(17,8)", fieldSchema.getTypeString()); - } - } - } - - @Test - public void replaceDecimalTypeIfNeededTest2() throws Exception { - ArrayList<Document> documents = new ArrayList<>(); - documents.add(new Document("fields1", 1234567.666666)); - documents.add(new Document("fields1", 123456789)); - - MongoDBSchema mongoDBSchema = new MongoDBSchema(documents, "db_TEST", "test_table", ""); - Map<String, FieldSchema> fields = mongoDBSchema.getFields(); - for (Map.Entry<String, FieldSchema> entry : fields.entrySet()) { - FieldSchema fieldSchema = entry.getValue(); - String fieldName = entry.getKey(); - if (fieldName.equals("fields1")) { - assertEquals("DECIMAL(15,6)", fieldSchema.getTypeString()); - } - } - } - - @Test - public void replaceDecimalTypeIfNeededTest3() throws Exception { - ArrayList<Document> documents = new ArrayList<>(); - documents.add(new Document("fields1", 1234567.666666)); - documents.add(new Document("fields1", 123456789)); - documents.add(new Document("fields1", 1234567.7777777)); - documents.add( - new Document("fields1", new Decimal128(new BigDecimal("12345679012.999999999")))); - - MongoDBSchema mongoDBSchema = new MongoDBSchema(documents, "db_TEST", "test_table", ""); - Map<String, FieldSchema> fields = mongoDBSchema.getFields(); - for (Map.Entry<String, FieldSchema> entry : fields.entrySet()) { - FieldSchema fieldSchema = entry.getValue(); - String fieldName = entry.getKey(); - if (fieldName.equals("fields1")) { - assertEquals("DECIMAL(20,9)", fieldSchema.getTypeString()); - } - } - } - - @Test - public void replaceDecimalTypeIfNeededTest4() throws Exception { - ArrayList<Document> documents = new ArrayList<>(); - documents.add(new Document("fields1", "yes")); - documents.add(new Document("fields1", 1234567.666666)); - documents.add(new Document("fields1", 123456789)); - documents.add(new Document("fields1", 1234567.7777777)); - documents.add( - new Document("fields1", new Decimal128(new BigDecimal("12345679012.999999999")))); - - MongoDBSchema mongoDBSchema = new MongoDBSchema(documents, "db_TEST", "test_table", ""); - Map<String, FieldSchema> fields = mongoDBSchema.getFields(); - for (Map.Entry<String, FieldSchema> entry : fields.entrySet()) { - FieldSchema fieldSchema = entry.getValue(); - String fieldName = entry.getKey(); - if (fieldName.equals("fields1")) { - assertEquals("STRING", fieldSchema.getTypeString()); - } - } - } - - @Test - public void replaceDecimalTypeIfNeededTest5() throws Exception { - ArrayList<Document> documents = new
ArrayList<>(); - documents.add(new Document("fields1", 1234567.666666)); - documents.add(new Document("fields1", 123456789)); - documents.add(new Document("fields1", 1234567.7777777)); - documents.add(new Document("fields1", "yes")); - documents.add( - new Document("fields1", new Decimal128(new BigDecimal("12345679012.999999999")))); - - MongoDBSchema mongoDBSchema = new MongoDBSchema(documents, "db_TEST", "test_table", ""); - Map fields = mongoDBSchema.getFields(); - for (Map.Entry entry : fields.entrySet()) { - FieldSchema fieldSchema = entry.getValue(); - String fieldName = entry.getKey(); - if (fieldName.equals("fields1")) { - assertEquals("STRING", fieldSchema.getTypeString()); - } - } - } - - @Test - public void replaceDecimalTypeIfNeededTest6() throws Exception { - ArrayList documents = new ArrayList<>(); - documents.add(new Document("fields1", 1234567.666666)); - documents.add(new Document("fields1", 123456789)); - documents.add(new Document("fields1", 1234567.7777777)); - documents.add(new Document("fields1", 123444555433445L)); - documents.add( - new Document("fields1", new Decimal128(new BigDecimal("12345679012.999999999")))); - - MongoDBSchema mongoDBSchema = new MongoDBSchema(documents, "db_TEST", "test_table", ""); - Map fields = mongoDBSchema.getFields(); - for (Map.Entry entry : fields.entrySet()) { - FieldSchema fieldSchema = entry.getValue(); - String fieldName = entry.getKey(); - if (fieldName.equals("fields1")) { - assertEquals("DECIMAL(24,9)", fieldSchema.getTypeString()); - } - } - } -} diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBTypeTest.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBTypeTest.java deleted file mode 100644 index ee511ce24..000000000 --- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDBTypeTest.java +++ /dev/null @@ -1,139 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
-
-package org.apache.doris.flink.tools.cdc.mongodb;
-
-import org.apache.flink.api.java.tuple.Tuple2;
-
-import com.fasterxml.jackson.databind.node.BooleanNode;
-import com.fasterxml.jackson.databind.node.DecimalNode;
-import com.fasterxml.jackson.databind.node.DoubleNode;
-import com.fasterxml.jackson.databind.node.IntNode;
-import com.fasterxml.jackson.databind.node.JsonNodeFactory;
-import com.fasterxml.jackson.databind.node.LongNode;
-import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.fasterxml.jackson.databind.node.TextNode;
-import org.apache.doris.flink.catalog.doris.DorisType;
-import org.bson.BsonArray;
-import org.bson.types.Decimal128;
-import org.bson.types.ObjectId;
-import org.junit.Test;
-
-import java.math.BigDecimal;
-import java.util.Date;
-import java.util.HashMap;
-
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-
-public class MongoDBTypeTest {
-
-    @Test
-    public void toDorisType() {
-        assertEquals(DorisType.INT, MongoDBType.toDorisType(new Integer(123)));
-        assertEquals(DorisType.DATETIME_V2 + "(3)", MongoDBType.toDorisType(new Date()));
-        assertEquals(DorisType.BIGINT, MongoDBType.toDorisType(new Long(1234567891)));
-        assertEquals("DECIMALV3(6,2)", MongoDBType.toDorisType(new Double("1234.56")));
-        assertEquals(DorisType.BOOLEAN, MongoDBType.toDorisType(new Boolean(true)));
-        assertEquals(DorisType.STRING, MongoDBType.toDorisType("string"));
-        assertEquals(
-                DorisType.VARCHAR + "(30)",
-                MongoDBType.toDorisType(new ObjectId("66583533791a67a6f8c5a339")));
-        assertEquals(DorisType.ARRAY, MongoDBType.toDorisType(new BsonArray()));
-        assertEquals(
-                "DECIMALV3(10,5)",
-                MongoDBType.toDorisType(new Decimal128(new BigDecimal("12345.55555"))));
-    }
-
-    @Test
-    public void jsonNodeToDorisType() {
-        assertEquals(DorisType.INT, MongoDBType.jsonNodeToDorisType(new IntNode(1234)));
-        assertEquals(DorisType.STRING, MongoDBType.jsonNodeToDorisType(new TextNode("text")));
-        assertEquals(DorisType.BIGINT, MongoDBType.jsonNodeToDorisType(new LongNode(1234568948)));
-        assertEquals(DorisType.DOUBLE, MongoDBType.jsonNodeToDorisType(new DoubleNode(1234.23)));
-        assertEquals(DorisType.BOOLEAN, MongoDBType.jsonNodeToDorisType(BooleanNode.TRUE));
-        assertEquals(
-                DorisType.ARRAY,
-                MongoDBType.jsonNodeToDorisType(JsonNodeFactory.instance.arrayNode()));
-        assertEquals(
-                "DECIMALV3(6,2)",
-                MongoDBType.jsonNodeToDorisType(new DecimalNode(new BigDecimal("1234.23"))));
-
-        ObjectNode dateJsonNodes = JsonNodeFactory.instance.objectNode();
-        dateJsonNodes.put(MongoDBType.DATE_TYPE, "");
-        assertEquals(DorisType.DATETIME_V2 + "(3)", MongoDBType.jsonNodeToDorisType(dateJsonNodes));
-
-        ObjectNode decimalJsonNodes = JsonNodeFactory.instance.objectNode();
-        decimalJsonNodes.put(MongoDBType.DECIMAL_TYPE, "1234.23");
-        assertEquals("DECIMALV3(6,2)", MongoDBType.jsonNodeToDorisType(decimalJsonNodes));
-
-        ObjectNode longJsonNodes = JsonNodeFactory.instance.objectNode();
-        longJsonNodes.put(MongoDBType.LONG_TYPE, "1234234466");
-        assertEquals(DorisType.BIGINT, MongoDBType.jsonNodeToDorisType(longJsonNodes));
-    }
-
-    @Test
-    public void getDecimalPrecisionAndScale() {
-        String decimalString1 = "DECIMAL(13,6)";
-        String decimalString2 = "DECIMAL(20,10)";
-        String decimalString3 = "DECIMAL(5,10)";
-        String decimalString4 = "DECIMAL(10,5)";
-
-        Tuple2<Integer, Integer> decimalPrecision1 =
-                MongoDBType.getDecimalPrecisionAndScale(decimalString1);
-        assertArrayEquals(
-                new int[] {13, 6}, new int[] {decimalPrecision1.f0, decimalPrecision1.f1});
-
-        Tuple2<Integer, Integer> decimalPrecision2 =
-                MongoDBType.getDecimalPrecisionAndScale(decimalString2);
-        assertArrayEquals(
-                new int[] {20, 10}, new int[] {decimalPrecision2.f0, decimalPrecision2.f1});
-
-        Tuple2<Integer, Integer> decimalPrecision3 =
-                MongoDBType.getDecimalPrecisionAndScale(decimalString3);
-        assertArrayEquals(
-                new int[] {5, 10}, new int[] {decimalPrecision3.f0, decimalPrecision3.f1});
-
-        Tuple2<Integer, Integer> decimalPrecision4 =
-                MongoDBType.getDecimalPrecisionAndScale(decimalString4);
-        assertArrayEquals(
-                new int[] {10, 5}, new int[] {decimalPrecision4.f0, decimalPrecision4.f1});
-    }
-
-    @Test
-    public void checkAndRebuildBigDecimal() {
-
-        HashMap<String, String> decimalTestMap =
-                new HashMap<String, String>() {
-                    {
-                        put("123456789.55555", "DECIMALV3(14,5)");
-                        put("123456789.666666", "DECIMALV3(15,6)");
-                        put("123456789.7777777", "DECIMALV3(16,7)");
-                        put("123456789.88888888", "DECIMALV3(17,8)");
-                        put("123456789.999999999", "DECIMALV3(18,9)");
-                        put("123456789.1", "DECIMALV3(10,1)");
-                        put("123456789.22", "DECIMALV3(11,2)");
-                        put("12345E4", "DECIMALV3(9,0)");
-                        put("0.12345E4", "DECIMALV3(5,1)");
-                    }
-                };
-
-        decimalTestMap.forEach(
-                (k, v) ->
-                        assertEquals(MongoDBType.checkAndRebuildBigDecimal(new BigDecimal(k)), v));
-    }
-}
diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverterTest.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverterTest.java
deleted file mode 100644
index 85608cc1b..000000000
--- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoDateConverterTest.java
+++ /dev/null
@@ -1,32 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.flink.tools.cdc.mongodb;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-public class MongoDateConverterTest {
-
-    @Test
-    public void convertTimestampToString() {
-        Long timestamp = 1717488217456L;
-        String formatStr = MongoDateConverter.convertTimestampToString(timestamp);
-        assertEquals("2024-06-04 16:03:37.456000", formatStr);
-    }
-}
diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunctionTest.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunctionTest.java
deleted file mode 100644
index e0c09b0fd..000000000
--- a/flink-doris-connector/src/test/java/org/apache/doris/flink/tools/cdc/mongodb/MongoParsingProcessFunctionTest.java
+++ /dev/null
@@ -1,35 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.flink.tools.cdc.mongodb;
-
-import org.junit.Test;
-
-import static org.junit.Assert.assertEquals;
-
-public class MongoParsingProcessFunctionTest {
-
-    @Test
-    public void getRecordTableName() throws Exception {
-        String record =
-                "{\"_id\":\"{\\\"_id\\\": {\\\"$oid\\\": \\\"66583533791a67a6f8c5a339\\\"}}\",\"operationType\":\"insert\",\"fullDocument\":\"{\\\"_id\\\": {\\\"$oid\\\": \\\"66583533791a67a6f8c5a339\\\"}, \\\"key1\\\": \\\"value1\\\"}\",\"source\":{\"ts_ms\":0,\"snapshot\":\"true\"},\"ts_ms\":1717065582062,\"ns\":{\"db\":\"test\",\"coll\":\"cdc_test\"},\"to\":null,\"documentKey\":\"{\\\"_id\\\": {\\\"$oid\\\": \\\"66583533791a67a6f8c5a339\\\"}}\",\"updateDescription\":null,\"clusterTime\":null,\"txnNumber\":null,\"lsid\":null}";
-        MongoParsingProcessFunction mongoParsingProcessFunction =
-                new MongoParsingProcessFunction(null);
-        String recordTableName = mongoParsingProcessFunction.getRecordTableName(record);
-        assertEquals("cdc_test", recordTableName);
-    }
-}
diff --git a/flink-doris-connector/src/test/java/org/apache/doris/flink/utils/DateToStringConverter.java b/flink-doris-connector/src/test/java/org/apache/doris/flink/utils/DateToStringConverter.java
deleted file mode 100644
index a63228935..000000000
--- a/flink-doris-connector/src/test/java/org/apache/doris/flink/utils/DateToStringConverter.java
+++ /dev/null
@@ -1,155 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.flink.utils;
-
-import org.apache.flink.cdc.connectors.shaded.org.apache.kafka.connect.data.SchemaBuilder;
-
-import io.debezium.spi.converter.CustomConverter;
-import io.debezium.spi.converter.RelationalColumn;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.Timestamp;
-import java.time.DateTimeException;
-import java.time.Duration;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.LocalTime;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.Properties;
-import java.util.function.Consumer;
-
-public class DateToStringConverter implements CustomConverter<SchemaBuilder, RelationalColumn> {
-    private static final Logger log = LoggerFactory.getLogger(DateToStringConverter.class);
-    private DateTimeFormatter dateFormatter = DateTimeFormatter.ISO_DATE;
-    private DateTimeFormatter timeFormatter = DateTimeFormatter.ISO_TIME;
-    private DateTimeFormatter datetimeFormatter = DateTimeFormatter.ISO_DATE_TIME;
-    private DateTimeFormatter timestampFormatter = DateTimeFormatter.ISO_DATE_TIME;
-    private ZoneId timestampZoneId = ZoneId.systemDefault();
-
-    public static final Properties DEFAULT_PROPS = new Properties();
-
-    static {
-        DEFAULT_PROPS.setProperty("converters", "date");
-        DEFAULT_PROPS.setProperty(
-                "date.type", "org.apache.doris.flink.utils.DateToStringConverter");
-        DEFAULT_PROPS.setProperty("date.format.date", "yyyy-MM-dd");
-        DEFAULT_PROPS.setProperty("date.format.datetime", "yyyy-MM-dd HH:mm:ss");
-        DEFAULT_PROPS.setProperty("date.format.timestamp", "yyyy-MM-dd HH:mm:ss");
-        DEFAULT_PROPS.setProperty("date.format.timestamp.zone", "UTC");
-    }
-
-    @Override
-    public void configure(Properties props) {
-        readProps(props, "format.date", p -> dateFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(props, "format.time", p -> timeFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(
-                props, "format.datetime", p -> datetimeFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(
-                props,
-                "format.timestamp",
-                p -> timestampFormatter = DateTimeFormatter.ofPattern(p));
-        readProps(props, "format.timestamp.zone", z -> timestampZoneId = ZoneId.of(z));
-    }
-
-    private void readProps(Properties properties, String settingKey, Consumer<String> callback) {
-        String settingValue = (String) properties.get(settingKey);
-        if (settingValue == null || settingValue.length() == 0) {
-            return;
-        }
-        try {
-            callback.accept(settingValue.trim());
-        } catch (IllegalArgumentException | DateTimeException e) {
-            log.error("setting {} is illegal:{}", settingKey, settingValue);
-            throw e;
-        }
-    }
-
-    @Override
-    public void converterFor(
-            RelationalColumn column, ConverterRegistration<SchemaBuilder> registration) {
-        String sqlType = column.typeName().toUpperCase();
-        SchemaBuilder schemaBuilder = null;
-        Converter converter = null;
-        if ("DATE".equals(sqlType)) {
-            schemaBuilder = SchemaBuilder.string().optional();
-            converter = this::convertDate;
-        }
-        if ("TIME".equals(sqlType)) {
-            schemaBuilder = SchemaBuilder.string().optional();
-            converter = this::convertTime;
-        }
-        if ("DATETIME".equals(sqlType)) {
-            schemaBuilder = SchemaBuilder.string().optional();
-            converter = this::convertDateTime;
-        }
-        if ("TIMESTAMP".equals(sqlType)) {
-            schemaBuilder = SchemaBuilder.string().optional();
-            converter = this::convertTimestamp;
-        }
-        if (schemaBuilder != null) {
-            registration.register(schemaBuilder, converter);
-        }
-    }
-
-    private String convertDate(Object input) {
-        if (input instanceof LocalDate) {
-            return dateFormatter.format((LocalDate) input);
-        } else if (input instanceof Integer) {
-            LocalDate date = LocalDate.ofEpochDay((Integer) input);
-            return dateFormatter.format(date);
-        }
-        return null;
-    }
-
-    private String convertTime(Object input) {
-        if (input instanceof Duration) {
-            Duration duration = (Duration) input;
-            long seconds = duration.getSeconds();
-            int nano = duration.getNano();
-            LocalTime time = LocalTime.ofSecondOfDay(seconds).withNano(nano);
-            return timeFormatter.format(time);
-        }
-        return null;
-    }
-
-    private String convertDateTime(Object input) {
-        if (input instanceof LocalDateTime) {
-            return datetimeFormatter.format((LocalDateTime) input);
-        } else if (input instanceof Timestamp) {
-            return datetimeFormatter.format(((Timestamp) input).toLocalDateTime());
-        }
-        return null;
-    }
-
-    private String convertTimestamp(Object input) {
-        if (input instanceof ZonedDateTime) {
-            // mysql timestamp will be converted to UTC storage,
-            // and the zonedDatetime here is UTC time
-            ZonedDateTime zonedDateTime = (ZonedDateTime) input;
-            LocalDateTime localDateTime =
-                    zonedDateTime.withZoneSameInstant(timestampZoneId).toLocalDateTime();
-            return timestampFormatter.format(localDateTime);
-        } else if (input instanceof Timestamp) {
-            return timestampFormatter.format(((Timestamp) input).toLocalDateTime());
-        }
-        return null;
-    }
-}