diff --git a/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy b/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
index 7be15f94243e7b..0e00cd8fb7a8bc 100644
--- a/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
+++ b/regression-test/suites/external_table_p0/paimon/paimon_base_filesystem.groovy
@@ -29,6 +29,7 @@ suite("paimon_base_filesystem", "p0,external,doris,external_docker,external_dock
     String s3ak = getS3AK()
     String s3sk = getS3SK()
+    def s3Endpoint = getS3Endpoint()

     def cos = """select c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c18 from ${catalog_cos}.zd.all_table order by c18"""
     def oss = """select * from ${catalog_oss}.paimonossdb1.test_tableoss order by a"""
@@ -48,9 +49,9 @@ suite("paimon_base_filesystem", "p0,external,doris,external_docker,external_dock
         create catalog if not exists ${catalog_oss} properties (
             "type" = "paimon",
             "warehouse" = "oss://paimon-zd/paimonoss",
-            "oss.endpoint"="oss-cn-beijing.aliyuncs.com",
             "oss.access_key"="${ak}",
-            "oss.secret_key"="${sk}"
+            "oss.secret_key"="${sk}",
+            "oss.endpoint"="oss-cn-beijing.aliyuncs.com"
         );
     """
     logger.info("catalog " + catalog_cos + " created")
diff --git a/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy b/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy
index 87633ba1b09a5f..cf9fea67cbd685 100644
--- a/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy
+++ b/regression-test/suites/external_table_p2/hive/test_hive_write_insert_s3.groovy
@@ -17,6 +17,7 @@
 suite("test_hive_write_insert_s3", "p2,external,hive,external_remote,external_remote_hive") {
     def format_compressions = ["parquet_snappy"]
+    def s3BucketName = getS3BucketName()

     def q01 = { String format_compression, String catalog_name ->
         logger.info("hive sql: " + """ truncate table all_types_${format_compression}_s3; """)
@@ -76,8 +77,8 @@ suite("test_hive_write_insert_s3", "p2,external,hive,external_remote,external_re
     hive_remote """ DROP TABLE IF EXISTS all_types_par_${format_compression}_s3_${catalog_name}_q02; """
     logger.info("hive sql: " + """ CREATE TABLE IF NOT EXISTS all_types_par_${format_compression}_s3_${catalog_name}_q02 like all_types_par_${format_compression}_s3; """)
     hive_remote """ CREATE TABLE IF NOT EXISTS all_types_par_${format_compression}_s3_${catalog_name}_q02 like all_types_par_${format_compression}_s3; """
-    logger.info("hive sql: " + """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://doris-build-1308700295/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """)
-    hive_remote """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://doris-build-1308700295/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """
+    logger.info("hive sql: " + """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://${s3BucketName}/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """)
+    hive_remote """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://${s3BucketName}/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """
     sql """refresh catalog ${catalog_name};"""

     sql """
diff --git a/regression-test/suites/github_events_p2/load.groovy b/regression-test/suites/github_events_p2/load.groovy
index dc2e0dbb97505c..92a588a2214b29 100644
--- a/regression-test/suites/github_events_p2/load.groovy
+++ b/regression-test/suites/github_events_p2/load.groovy
@@ -31,7 +31,7 @@ suite("load") {
         ak "${getS3AK()}"
         sk "${getS3SK()}"
         endpoint "http://${getS3Endpoint()}"
-        region "ap-beijing"
+        region "${getS3Region()}"
         repository "regression_test_github_events"
         snapshot "github_events"
         timestamp "2022-03-23-12-19-51"
diff --git a/regression-test/suites/load_p0/broker_load/test_compress_type.groovy b/regression-test/suites/load_p0/broker_load/test_compress_type.groovy
index 693e533fa5086f..723a07d5296b37 100644
--- a/regression-test/suites/load_p0/broker_load/test_compress_type.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_compress_type.groovy
@@ -17,6 +17,7 @@
 suite("test_compress_type", "load_p0") {
     def tableName = "basic_data"
+    def s3BucketName = getS3BucketName()

     // GZ/LZO/BZ2/LZ4FRAME/DEFLATE/LZOP
     def compressTypes = [
@@ -62,24 +63,24 @@ suite("test_compress_type", "load_p0") {
     ]

     def paths = [
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
-        "s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
-        "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.gz",
-        "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.bz2",
-        "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.lz4",
-        "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.gz",
-        "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.bz2",
-        "s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.lz4",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
+        "s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
+        "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.gz",
+        "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.bz2",
+        "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.lz4",
+        "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.gz",
+        "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.bz2",
+        "s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.lz4",
     ]

     def labels = []
@@ -137,8 +138,8 @@ suite("test_compress_type", "load_p0") {
         WITH S3 (
             "AWS_ACCESS_KEY" = "$ak",
             "AWS_SECRET_KEY" = "$sk",
-            "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
-            "AWS_REGION" = "ap-beijing",
+            "AWS_ENDPOINT" = "${getS3Endpoint()}",
+            "AWS_REGION" = "${getS3Region()}",
             "provider" = "${getS3Provider()}"
         )
         """
diff --git a/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy b/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy
index eea25fb453495f..291f623a512eac 100644
--- a/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy
+++ b/regression-test/suites/load_p0/broker_load/test_csv_with_enclose_and_escapeS3_load.groovy
@@ -19,6 +19,7 @@
 suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {
     def tableName = "test_csv_with_enclose_and_escape"
+    def s3BucketName = getS3BucketName()

     sql """ DROP TABLE IF EXISTS ${tableName} """
     sql """
@@ -48,24 +49,24 @@ suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {
     ]

     for (i in 0..
50", "")
-        .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.csv")
+        .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.csv")
         .addProperty("format", "csv")
         .addProperty("column_separator", "|")
         .addProperty("force_parsing_by_standard_uri", "true"))
@@ -720,14 +722,14 @@ suite("test_s3_tvf", "p2") {
     attributeList.add(new TvfAttribute("agg_tbl_basic_tvf", "c1 as k00,c2 as k01,c3 as k02,c4 as k03,c5 as k04,c6 as k05,c7 as k06,c8 as k07,c9 as k08,c10 as k09,c11 as k10,c12 as k11,c13 as k12,c14 as k13,c15 as k14,c16 as k15,c17 as k16,c18 as k17,c19 as k18, to_bitmap(c6) as k19, HLL_HASH(c6) as k20, TO_QUANTILE_STATE(c5, 1.0) as k21, to_bitmap(c6) as kd19, HLL_HASH(c6) as kd20, TO_QUANTILE_STATE(c5, 1.0) as kd21",
             "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21" ,"WHERE c1 > 50", "")
-        .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.csv")
+        .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.csv")
         .addProperty("format", "csv")
         .addProperty("column_separator", "|")
         .addProperty("force_parsing_by_standard_uri", "true"))

     for(String table : arrayTables) {
         attributeList.add(new TvfAttribute(table, ["k00", "k01", "k02", "k03", "k04", "k05", "k06", "k07", "k08", "k09", "k10", "k11", "k12", "k13", "k14", "k15", "k16", "k17"], "WHERE c1 > 50", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_array_data.csv")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_array_data.csv")
             .addProperty("format", "csv")
             .addProperty("column_separator", "|")
             .addProperty("force_parsing_by_standard_uri", "true"))
@@ -735,7 +737,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : uniqTable) {
         attributeList.add(new TvfAttribute(table, ["k00", "k01", "k02", "k03", "k04", "k05", "k06", "k07", "k08", "k09", "k10", "k11", "k12", "k13", "k14", "k15", "k16", "k17", "k18"], "", "ORDER BY c1")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.csv")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.csv")
             .addProperty("format", "csv")
             .addProperty("column_separator", "|")
             .addProperty("force_parsing_by_standard_uri", "true"))
@@ -743,7 +745,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : basicTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18","k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.parq")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.parq")
             .addProperty("format", "parquet")
             .addProperty("column_separator", "|")
             .addProperty("force_parsing_by_standard_uri", "true"))
@@ -751,14 +753,14 @@ suite("test_s3_tvf", "p2") {
     attributeList.add(new TvfAttribute("agg_tbl_basic_tvf", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18, to_bitmap(k05) as k19, HLL_HASH(k05) as k20, TO_QUANTILE_STATE(k04, 1.0) as k21, to_bitmap(k05) as kd19, HLL_HASH(k05) as kd20, TO_QUANTILE_STATE(k04, 1.0) as kd21",
             "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21" ,"", "")
-        .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.parq")
+        .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.parq")
         .addProperty("format", "parquet")
         .addProperty("column_separator", "|")
         .addProperty("force_parsing_by_standard_uri", "true"))

     for(String table : arrayTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_array_data.parq")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_array_data.parq")
             .addProperty("format", "parquet")
             .addProperty("column_separator", "|")
             .addProperty("force_parsing_by_standard_uri", "true"))
@@ -766,7 +768,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : basicTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18","k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.orc")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.orc")
             .addProperty("format", "orc")
             .addProperty("column_separator", "|")
             .addProperty("force_parsing_by_standard_uri", "true"))
@@ -774,14 +776,14 @@ suite("test_s3_tvf", "p2") {
     attributeList.add(new TvfAttribute("agg_tbl_basic_tvf", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18, to_bitmap(k05) as k19, HLL_HASH(k05) as k20, TO_QUANTILE_STATE(k04, 1.0) as k21, to_bitmap(k05) as kd19, HLL_HASH(k05) as kd20, TO_QUANTILE_STATE(k04, 1.0) as kd21",
             "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21" ,"", "")
-        .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.orc")
+        .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.orc")
         .addProperty("format", "orc")
         .addProperty("column_separator", "|")
         .addProperty("force_parsing_by_standard_uri", "true"))

     for(String table : arrayTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_array_data.orc")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_array_data.orc")
             .addProperty("format", "orc")
             .addProperty("column_separator", "|")
             .addProperty("force_parsing_by_standard_uri", "true"))
@@ -789,7 +791,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : basicTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18","k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.json")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.json")
             .addProperty("format", "json")
             .addProperty("read_json_by_line", "false")
             .addProperty("strip_outer_array", "true")
@@ -799,7 +801,7 @@ suite("test_s3_tvf", "p2") {
     attributeList.add(new TvfAttribute("agg_tbl_basic_tvf", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18, to_bitmap(k05) as k19, HLL_HASH(k05) as k20, TO_QUANTILE_STATE(k04, 1.0) as k21, to_bitmap(k05) as kd19, HLL_HASH(k05) as kd20, TO_QUANTILE_STATE(k04, 1.0) as kd21",
             "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21" ,"", "")
-        .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.json")
+        .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.json")
         .addProperty("format", "json")
         .addProperty("read_json_by_line", "false")
         .addProperty("strip_outer_array", "true")
@@ -808,7 +810,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : arrayTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_array_data.json")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_array_data.json")
             .addProperty("format", "json")
             .addProperty("read_json_by_line", "false")
             .addProperty("strip_outer_array", "true")
@@ -818,7 +820,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : basicTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18","k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data_by_line.json")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data_by_line.json")
             .addProperty("format", "json")
             .addProperty("read_json_by_line", "true")
             .addProperty("strip_outer_array", "false")
@@ -828,7 +830,7 @@ suite("test_s3_tvf", "p2") {
     attributeList.add(new TvfAttribute("agg_tbl_basic_tvf", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18, to_bitmap(k05) as k19, HLL_HASH(k05) as k20, TO_QUANTILE_STATE(k04, 1.0) as k21, to_bitmap(k05) as kd19, HLL_HASH(k05) as kd20, TO_QUANTILE_STATE(k04, 1.0) as kd21",
             "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21" ,"", "")
-        .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data_by_line.json")
+        .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data_by_line.json")
         .addProperty("format", "json")
         .addProperty("read_json_by_line", "true")
         .addProperty("strip_outer_array", "false")
@@ -837,7 +839,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : arrayTables) {
         attributeList.add(new TvfAttribute(table, "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17", "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_array_data_by_line.json")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_array_data_by_line.json")
             .addProperty("format", "json")
             .addProperty("read_json_by_line", "true")
             .addProperty("strip_outer_array", "false")
@@ -850,7 +852,7 @@ suite("test_s3_tvf", "p2") {
     // line_delimiter: \t
     for(String table : basicTables) {
         attributeList.add(new TvfAttribute(table, ["K00", "K01", "K02", "K03", "K04", "K05", "K06", "K07", "K08", "K09", "K10", "K11", "K12", "K13", "K14", "K15", "K16", "K17", "K18"], "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data_by_line_delimiter.csv")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data_by_line_delimiter.csv")
             .addProperty("format", "csv")
             .addProperty("column_separator", "|")
             .addProperty("line_delimiter", "\t")
@@ -859,7 +861,7 @@ suite("test_s3_tvf", "p2") {
     attributeList.add(new TvfAttribute("agg_tbl_basic_tvf", "c1 as k00,c2 as k01,c3 as k02,c4 as k03,c5 as k04,c6 as k05,c7 as k06,c8 as k07,c9 as k08,c10 as k09,c11 as k10,c12 as k11,c13 as k12,c14 as k13,c15 as k14,c16 as k15,c17 as k16,c18 as k17,c19 as k18, to_bitmap(c6) as k19, HLL_HASH(c6) as k20, TO_QUANTILE_STATE(c5, 1.0) as k21, to_bitmap(c6) as kd19, HLL_HASH(c6) as kd20, TO_QUANTILE_STATE(c5, 1.0) as kd21",
             "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21" ,"", "")
-        .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data_by_line_delimiter.csv")
+        .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data_by_line_delimiter.csv")
         .addProperty("format", "csv")
         .addProperty("column_separator", "|")
         .addProperty("line_delimiter", "\t")
@@ -867,7 +869,7 @@ suite("test_s3_tvf", "p2") {

     for(String table : arrayTables) {
         attributeList.add(new TvfAttribute(table, ["K00", "K01", "K02", "K03", "K04", "K05", "K06", "K07", "K08", "K09", "K10", "K11", "K12", "K13", "K14", "K15", "K16", "K17"], "", "")
-            .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_array_data_by_tab_line_delimiter.csv")
+            .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_array_data_by_tab_line_delimiter.csv")
             .addProperty("format", "csv")
             .addProperty("column_separator", "|")
             .addProperty("line_delimiter", "\t")
@@ -877,7 +879,7 @@ suite("test_s3_tvf", "p2") {

     // invalid line delimiter, this will case error
     // for(String table : basicTables) {
     //     attributeList.add(new TvfAttribute(table, ["K00", "K01", "K02", "K03", "K04", "K05", "K06", "K07", "K08", "K09", "K10", "K11", "K12", "K13", "K14", "K15", "K16", "K17", "K18"], "", "")
-    //         .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.csv")
+    //         .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.csv")
     //         .addProperty("format", "csv")
     //         .addProperty("column_separator", "|")
     //         .addProperty("line_delimiter", ",")
@@ -886,7 +888,7 @@ suite("test_s3_tvf", "p2") {

     // attributeList.add(new TvfAttribute("agg_tbl_basic_tvf", "c1 as k00,c2 as k01,c3 as k02,c4 as k03,c5 as k04,c6 as k05,c7 as k06,c8 as k07,c9 as k08,c10 as k09,c11 as k10,c12 as k11,c13 as k12,c14 as k13,c15 as k14,c16 as k15,c17 as k16,c18 as k17,c19 as k18, to_bitmap(c6) as k19, HLL_HASH(c6) as k20, TO_QUANTILE_STATE(c5, 1.0) as k21, to_bitmap(c6) as kd19, HLL_HASH(c6) as kd20, TO_QUANTILE_STATE(c5, 1.0) as kd21",
     //         "k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18,k19,k20,k21,kd19,kd20,kd21" ,"", "")
-    //         .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.csv")
+    //         .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_data.csv")
     //         .addProperty("format", "csv")
     //         .addProperty("column_separator", "|")
     //         .addProperty("line_delimiter", ",")
@@ -894,7 +896,7 @@ suite("test_s3_tvf", "p2") {

     // for(String table : arrayTables) {
     //     attributeList.add(new TvfAttribute(table, ["K00", "K01", "K02", "K03", "K04", "K05", "K06", "K07", "K08", "K09", "K10", "K11", "K12", "K13", "K14", "K15", "K16", "K17"], "", "")
-    //         .addProperty("uri", "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_array_data.csv")
+    //         .addProperty("uri", "s3://${s3BucketName}.${s3Endpoint}/regression/load/data/basic_array_data.csv")
     //         .addProperty("format", "csv")
     //         .addProperty("column_separator", "|")
     //         .addProperty("line_delimiter", ",")
@@ -923,7 +925,7 @@ suite("test_s3_tvf", "p2") {
             FROM S3 (
                 "s3.access_key" = "$ak",
                 "s3.secret_key" = "$sk",
-                "s3.region" = "ap-beijing",
+                "s3.region" = "${s3Region}",
                 ${prop}
             ) ${attribute.whereClause}
             ${attribute.orderByClause}
diff --git a/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy b/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy
index 2167c40f1d2a38..df7ce235e85afa 100644
--- a/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy
+++ b/regression-test/suites/partition_p2/auto_partition/diff_data/stress_test_diff_date_list.groovy
@@ -28,7 +28,7 @@ suite("stress_test_diff_date_list", "p2,nonConcurrent") {
     // get doris-db from s3
     def dirPath = context.file.parent
     def fileName = "doris-dbgen"
-    def fileUrl = "http://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
+    def fileUrl = "http://${getS3BucketName()}.${getS3Endpoint()}/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
     def filePath = Paths.get(dirPath, fileName)
     if (!Files.exists(filePath)) {
         new URL(fileUrl).withInputStream { inputStream ->
diff --git a/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy b/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy
index fb400105758601..c8bbdfbffc434f 100644
--- a/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy
+++ b/regression-test/suites/partition_p2/auto_partition/same_data/stress_test_same_date_range.groovy
@@ -28,7 +28,7 @@ suite("stress_test_same_date_range", "p2,nonConcurrent") {
     // get doris-db from s3
     def dirPath = context.file.parent
     def fileName = "doris-dbgen"
-    def fileUrl = "http://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
+    def fileUrl = "http://${getS3BucketName()}.${getS3Endpoint()}/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
     def filePath = Paths.get(dirPath, fileName)
     if (!Files.exists(filePath)) {
         new URL(fileUrl).withInputStream { inputStream ->
diff --git a/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy b/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy
index 478ee0d71240e5..ecbb277c5fd90b 100644
--- a/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy
+++ b/regression-test/suites/partition_p2/auto_partition/two_stream_load/stress_test_two_stream_load.groovy
@@ -26,7 +26,7 @@ suite("stress_test_two_stream_load", "p2,nonConcurrent") {
     // get doris-db from s3
     def dirPath = context.file.parent
     def fileName = "doris-dbgen"
-    def fileUrl = "http://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
+    def fileUrl = "http://${getS3BucketName()}.${getS3Endpoint()}/regression/doris-dbgen-23-10-18/doris-dbgen-23-10-20/doris-dbgen"
     def filePath = Paths.get(dirPath, fileName)
     if (!Files.exists(filePath)) {
         new URL(fileUrl).withInputStream { inputStream ->
diff --git a/regression-test/suites/query_profile/s3_load_profile_test.groovy b/regression-test/suites/query_profile/s3_load_profile_test.groovy
index 33bdf48895a0d4..686e357d4e1395 100644
--- a/regression-test/suites/query_profile/s3_load_profile_test.groovy
+++ b/regression-test/suites/query_profile/s3_load_profile_test.groovy
@@ -29,6 +29,8 @@ def getProfile = { id ->

 // ref https://github.com/apache/doris/blob/3525a03815814f66ec78aa2ad6bbd9225b0e7a6b/regression-test/suites/load_p0/broker_load/test_s3_load.groovy
 suite('s3_load_profile_test') {
+    def s3Endpoint = getS3Endpoint()
+    def s3Region = getS3Region()
     sql "drop table if exists dup_tbl_basic;"
     sql """
 CREATE TABLE dup_tbl_basic
@@ -97,7 +99,7 @@ PROPERTIES (
     "replication_num" = "1"
 );
 """
-    def loadAttribute =new LoadAttributes("s3://doris-build-1308700295/regression/load/data/basic_data.csv",
+    def loadAttribute =new LoadAttributes("s3://${getS3BucketName()}/regression/load/data/basic_data.csv",
             "dup_tbl_basic", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \"|\"", "FORMAT AS \"CSV\"", "(k00,k01,k02,k03,k04,k05,k06,k07,k08,k09,k10,k11,k12,k13,k14,k15,k16,k17,k18)",
             "", "", "", "", "")
@@ -128,8 +130,8 @@ PROPERTIES (
         WITH S3 (
             "AWS_ACCESS_KEY" = "$ak",
             "AWS_SECRET_KEY" = "$sk",
-            "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
-            "AWS_REGION" = "ap-beijing",
+            "AWS_ENDPOINT" = "${s3Endpoint}",
+            "AWS_REGION" = "${s3Region}",
             "use_path_style" = "$loadAttribute.usePathStyle",
             "provider" = "${getS3Provider()}"
         )
diff --git a/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy b/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy
index a589a484d5ed15..ef927ddaf4329b 100644
--- a/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy
+++ b/regression-test/suites/statistics/test_update_rows_and_partition_first_load.groovy
@@ -16,7 +16,9 @@
 // under the License.

 suite("test_update_rows_and_partition_first_load", "p2") {
-
+    def s3BucketName = getS3BucketName()
+    def s3Endpoint = getS3Endpoint()
+    def s3Region = getS3Region()
     String ak = getS3AK()
     String sk = getS3SK()
     String enabled = context.config.otherConfigs.get("enableBrokerLoad")
@@ -88,24 +90,24 @@ suite("test_update_rows_and_partition_first_load", "p2") {
     def label = "part_" + UUID.randomUUID().toString().replace("-", "0")
     sql """
         LOAD LABEL ${label} (
-            DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_1.csv")
+            DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_1.csv")
             INTO TABLE update_rows_test1
             COLUMNS TERMINATED BY ",",
-            DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_2.csv")
+            DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_2.csv")
             INTO TABLE update_rows_test2
             COLUMNS TERMINATED BY ",",
-            DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_1.csv")
+            DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_1.csv")
            INTO TABLE partition_test1
             COLUMNS TERMINATED BY ",",
-            DATA INFILE("s3://doris-build-1308700295/regression/load/data/update_rows_2.csv")
+            DATA INFILE("s3://${s3BucketName}/regression/load/data/update_rows_2.csv")
             INTO TABLE partition_test2
             COLUMNS TERMINATED BY ","
         )
         WITH S3 (
             "AWS_ACCESS_KEY" = "$ak",
             "AWS_SECRET_KEY" = "$sk",
-            "AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
-            "AWS_REGION" = "ap-beijing",
+            "AWS_ENDPOINT" = "${s3Endpoint}",
+            "AWS_REGION" = "${s3Region}",
             "provider" = "${getS3Provider()}"
         );
     """
diff --git a/regression-test/suites/tpcds_sf1000_p2/load.groovy b/regression-test/suites/tpcds_sf1000_p2/load.groovy
index aaf4fd54d71466..9bf888e93b0d7b 100644
--- a/regression-test/suites/tpcds_sf1000_p2/load.groovy
+++ b/regression-test/suites/tpcds_sf1000_p2/load.groovy
@@ -21,12 +21,13 @@
  *
  */
 suite("load") {
+    def s3Region = getS3Region()
     restore {
         location "s3://${getS3BucketName()}/regression/tpcds/sf1000"
         ak "${getS3AK()}"
         sk "${getS3SK()}"
         endpoint "http://${getS3Endpoint()}"
-        region "ap-beijing"
+        region "${s3Region}"
         repository "tpcds_backup"
         snapshot "tpcds_customer"
         timestamp "2022-03-31-10-16-46"
@@ -40,7 +41,7 @@ suite("load") {
         ak "${getS3AK()}"
         sk "${getS3SK()}"
        endpoint "http://${getS3Endpoint()}"
-        region "ap-beijing"
+        region "${s3Region}"
         repository "tpcds_backup"
         snapshot "tpcds"
         timestamp "2022-03-30-12-22-31"
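
Every hunk in this patch is the same mechanical substitution: the hardcoded COS coordinates (bucket `doris-build-1308700295`, endpoint `cos.ap-beijing.myqcloud.com`, region `ap-beijing`) are replaced by the framework helpers `getS3BucketName()`, `getS3Endpoint()`, `getS3Region()` (alongside the existing `getS3AK()`/`getS3SK()`/`getS3Provider()`), so the suites run against whatever object store the regression config points at. Below is a minimal, self-contained Groovy sketch of that pattern, assuming the helpers simply read keys such as `s3Endpoint` from the test configuration; the map and helper bodies are illustrative stand-ins, not the framework's actual implementation:

```groovy
// Hypothetical config map; in the real framework these values come from
// regression-conf.groovy via context.config. The example values are the
// old hardcoded literals this patch removes.
def otherConfigs = [
    s3Endpoint  : "cos.ap-beijing.myqcloud.com",
    s3Region    : "ap-beijing",
    s3BucketName: "doris-build-1308700295",
]

// Illustrative stand-ins for the suite-level getS3*() helpers.
def getS3Endpoint   = { otherConfigs.s3Endpoint }
def getS3Region     = { otherConfigs.s3Region }
def getS3BucketName = { otherConfigs.s3BucketName }

// Usage mirrors the updated suites: URIs are composed from config
// instead of being hardcoded per test file.
def tvfUri  = "s3://${getS3BucketName()}.${getS3Endpoint()}/regression/load/data/basic_data.csv"
def loadUri = "s3://${getS3BucketName()}/regression/load/data/basic_data.csv"

assert tvfUri == "s3://doris-build-1308700295.cos.ap-beijing.myqcloud.com/regression/load/data/basic_data.csv"
```

With this indirection, pointing the whole regression suite at a different bucket, region, or S3-compatible provider is a one-line config change rather than an edit to every suite that embeds a URI.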