Skip to content

Commit

Permalink
[test](fix) replace hardcoded s3BucketName (apache#37739)
Browse files Browse the repository at this point in the history
## Proposed changes

Issue Number: close #xxx

<!--Describe your changes.-->

---------

Co-authored-by: stephen <[email protected]>
  • Loading branch information
hello-stephen and stephen authored Jul 15, 2024
1 parent 7919f07 commit b8b36c6
Show file tree
Hide file tree
Showing 21 changed files with 389 additions and 364 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ suite("paimon_base_filesystem", "p0,external,doris,external_docker,external_dock

String s3ak = getS3AK()
String s3sk = getS3SK()
def s3Endpoint = getS3Endpoint()

def cos = """select c1,c2,c3,c4,c5,c6,c7,c8,c9,c10,c11,c12,c13,c14,c15,c16,c18 from ${catalog_cos}.zd.all_table order by c18"""
def oss = """select * from ${catalog_oss}.paimonossdb1.test_tableoss order by a"""
Expand All @@ -48,9 +49,9 @@ suite("paimon_base_filesystem", "p0,external,doris,external_docker,external_dock
create catalog if not exists ${catalog_oss} properties (
"type" = "paimon",
"warehouse" = "oss://paimon-zd/paimonoss",
"oss.endpoint"="oss-cn-beijing.aliyuncs.com",
"oss.access_key"="${ak}",
"oss.secret_key"="${sk}"
"oss.secret_key"="${sk}",
"oss.endpoint"="oss-cn-beijing.aliyuncs.com"
);
"""
logger.info("catalog " + catalog_cos + " created")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

suite("test_hive_write_insert_s3", "p2,external,hive,external_remote,external_remote_hive") {
def format_compressions = ["parquet_snappy"]
def s3BucketName = getS3BucketName()

def q01 = { String format_compression, String catalog_name ->
logger.info("hive sql: " + """ truncate table all_types_${format_compression}_s3; """)
Expand Down Expand Up @@ -76,8 +77,8 @@ suite("test_hive_write_insert_s3", "p2,external,hive,external_remote,external_re
hive_remote """ DROP TABLE IF EXISTS all_types_par_${format_compression}_s3_${catalog_name}_q02; """
logger.info("hive sql: " + """ CREATE TABLE IF NOT EXISTS all_types_par_${format_compression}_s3_${catalog_name}_q02 like all_types_par_${format_compression}_s3; """)
hive_remote """ CREATE TABLE IF NOT EXISTS all_types_par_${format_compression}_s3_${catalog_name}_q02 like all_types_par_${format_compression}_s3; """
logger.info("hive sql: " + """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://doris-build-1308700295/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """)
hive_remote """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://doris-build-1308700295/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """
logger.info("hive sql: " + """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://${s3BucketName}/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """)
hive_remote """ ALTER TABLE all_types_par_${format_compression}_s3_${catalog_name}_q02 SET LOCATION 'cosn://${s3BucketName}/regression/write/data/all_types_par_${format_compression}_s3_${catalog_name}_q02'; """
sql """refresh catalog ${catalog_name};"""

sql """
Expand Down
2 changes: 1 addition & 1 deletion regression-test/suites/github_events_p2/load.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ suite("load") {
ak "${getS3AK()}"
sk "${getS3SK()}"
endpoint "http://${getS3Endpoint()}"
region "ap-beijing"
region "${getS3Region()}"
repository "regression_test_github_events"
snapshot "github_events"
timestamp "2022-03-23-12-19-51"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

suite("test_compress_type", "load_p0") {
def tableName = "basic_data"
def s3BucketName = getS3BucketName()

// GZ/LZO/BZ2/LZ4FRAME/DEFLATE/LZOP
def compressTypes = [
Expand Down Expand Up @@ -62,24 +63,24 @@ suite("test_compress_type", "load_p0") {
]

def paths = [
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.gz",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.bz2",
"s3://doris-build-1308700295/regression/load/data/basic_data.csv.lz4",
"s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.gz",
"s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.bz2",
"s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.lz4",
"s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.gz",
"s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.bz2",
"s3://doris-build-1308700295/regression/load/data/basic_data_by_line.json.lz4",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.gz",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.bz2",
"s3://${s3BucketName}/regression/load/data/basic_data.csv.lz4",
"s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.gz",
"s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.bz2",
"s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.lz4",
"s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.gz",
"s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.bz2",
"s3://${s3BucketName}/regression/load/data/basic_data_by_line.json.lz4",
]
def labels = []

Expand Down Expand Up @@ -137,8 +138,8 @@ suite("test_compress_type", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
"AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
"AWS_REGION" = "ap-beijing",
"AWS_ENDPOINT" = "${getS3Endpoint()}",
"AWS_REGION" = "${getS3Region()}",
"provider" = "${getS3Provider()}"
)
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {

def tableName = "test_csv_with_enclose_and_escape"
def s3BucketName = getS3BucketName()

sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
Expand Down Expand Up @@ -48,24 +49,24 @@ suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {
]

for (i in 0..<normalCases.size()) {
attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/${normalCases[i]}.csv",
attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/${normalCases[i]}.csv",
"${tableName}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")"))
}

attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_incomplete.csv",
attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_incomplete.csv",
"${tableName}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")").addProperties("max_filter_ratio", "0.5"))

attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_without_escape.csv",
attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_without_escape.csv",
"${tableName}", "LINES TERMINATED BY \"\n\"", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")"))

attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_multi_char_delimiter.csv",
attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_multi_char_delimiter.csv",
"${tableName}", "LINES TERMINATED BY \"\$\$\$\"", "COLUMNS TERMINATED BY \"@@\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\", \"trim_double_quotes\" = \"true\")"))

attributesList.add(new LoadAttributes("s3://doris-build-1308700295/regression/load/data/enclose_not_trim_quotes.csv",
attributesList.add(new LoadAttributes("s3://${s3BucketName}/regression/load/data/enclose_not_trim_quotes.csv",
"${tableName}", "", "COLUMNS TERMINATED BY \",\"", "FORMAT AS \"CSV\"", "(k1,k2,v1,v2,v3,v4)",
"PROPERTIES (\"enclose\" = \"\\\"\", \"escape\" = \"\\\\\")").addProperties("trim_double_quotes", "false"))

Expand All @@ -92,8 +93,8 @@ suite("test_csv_with_enclose_and_escapeS3_load", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
"AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
"AWS_REGION" = "ap-beijing",
"AWS_ENDPOINT" = "${getS3Endpoint()}",
"AWS_REGION" = "${getS3Region()}",
"provider" = "${getS3Provider()}"
)
${prop}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
// under the License.

suite("test_etl_failed", "load_p0") {
def s3BucketName = getS3BucketName()
def s3Endpoint = getS3Endpoint()
def tableName = "test_etl_failed"
sql """ DROP TABLE IF EXISTS ${tableName} """
sql """
Expand All @@ -33,7 +35,7 @@ suite("test_etl_failed", "load_p0") {
PROPERTIES ("replication_allocation" = "tag.location.default: 1");
"""
String label = "test_etl_failed"
String path = "s3://doris-build-1308700295/regression/load/data/etl_failure/etl-failure.csv"
String path = "s3://${s3BucketName}/regression/load/data/etl_failure/etl-failure.csv"
String format = "CSV"
String ak = getS3AK()
String sk = getS3SK()
Expand All @@ -46,8 +48,8 @@ suite("test_etl_failed", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
"AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
"AWS_REGION" = "ap-beijing",
"AWS_ENDPOINT" = "${s3Endpoint}",
"AWS_REGION" = "${s3Region}",
"provider" = "${getS3Provider()}"
)
PROPERTIES(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,10 @@
// under the License.

suite("test_multi_table_load", "load_p0") {

def s3BucketName = getS3BucketName()
def s3Endpoint = getS3Endpoint()
def s3Region = getS3Region()

def tableName = "test_multi_table_load"

sql """ DROP TABLE IF EXISTS ${tableName} """
Expand Down Expand Up @@ -81,7 +84,7 @@ suite("test_multi_table_load", "load_p0") {
);
"""

def path = "s3://doris-build-1308700295/regression/load/data/basic_data.csv"
def path = "s3://${s3BucketName}/regression/load/data/basic_data.csv"
def format_str = "CSV"
def ak = getS3AK()
def sk = getS3SK()
Expand Down Expand Up @@ -129,8 +132,8 @@ suite("test_multi_table_load", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
"AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
"AWS_REGION" = "ap-beijing",
"AWS_ENDPOINT" = "${s3Endpoint}",
"AWS_REGION" = "${s3Region}",
"provider" = "${getS3Provider()}"
)
properties(
Expand Down Expand Up @@ -178,8 +181,8 @@ suite("test_multi_table_load", "load_p0") {
WITH S3 (
"AWS_ACCESS_KEY" = "$ak",
"AWS_SECRET_KEY" = "$sk",
"AWS_ENDPOINT" = "cos.ap-beijing.myqcloud.com",
"AWS_REGION" = "ap-beijing",
"AWS_ENDPOINT" = "${s3Endpoint}",
"AWS_REGION" = "${s3Region}",
"provider" = "${getS3Provider()}"
)
properties(
Expand Down
Loading

0 comments on commit b8b36c6

Please sign in to comment.