Skip to content

Commit

Permalink
[enhancement][fix] update storage vault case for upgrade test (#43948)
Browse files Browse the repository at this point in the history
### What problem does this PR solve?

Issue Number: close #xxx

Related PR: #xxx

Problem Summary:
To adapt to the upgrade test, this test case is split into two parts.
  • Loading branch information
MoanasDaddyXu authored and Your Name committed Nov 20, 2024
1 parent 0b98983 commit 6445928
Show file tree
Hide file tree
Showing 2 changed files with 204 additions and 135 deletions.
204 changes: 204 additions & 0 deletions regression-test/suites/vault_p0/create/load.groovy
Original file line number Diff line number Diff line change
@@ -0,0 +1,204 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.

// Cloud-mode regression suite covering CREATE STORAGE VAULT.
// Split out of test_create_vault.groovy for the upgrade test: this half
// creates the vaults (run before upgrade); the verification half runs after.
// Tagged "nonConcurrent" because it creates cluster-global vault names that
// would race with other suites.
suite("create_storage_vault", "nonConcurrent") {
// Storage vaults only exist in cloud mode; skip otherwise.
if (!isCloudMode()) {
logger.info("skip test_create_vault case because not cloud mode")
return
}

// Skip when the storage-vault feature flag is not enabled on the cluster.
if (!enableStoragevault()) {
logger.info("skip test_create_vault case")
return
}

// Negative: an S3-type vault configured with HDFS properties must be
// rejected for lacking the mandatory s3.endpoint property.
expectExceptionLike({
sql """
CREATE STORAGE VAULT IF NOT EXISTS failed_vault
PROPERTIES (
"type"="S3",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "ssb_sf1_p2",
"hadoop.username" = "hadoop"
);
"""
}, "Missing [s3.endpoint] in properties")

// Negative: an hdfs-type vault given an S3 bucket instead of fs.defaultFS
// must fail fs_name validation.
expectExceptionLike({
sql """
CREATE STORAGE VAULT IF NOT EXISTS failed_vault
PROPERTIES (
"type"="hdfs",
"s3.bucket"="${getHmsHdfsFs()}",
"path_prefix" = "ssb_sf1_p2",
"hadoop.username" = "hadoop"
);
"""
}, "invalid fs_name")

// Negative: empty PROPERTIES list is a parse error, not a semantic one.
expectExceptionLike({
sql """ CREATE STORAGE VAULT IF NOT EXISTS failed_vault PROPERTIES (); """
}, "mismatched input ')'")


// Positive: create an HDFS-backed vault that the rest of the suite (and the
// post-upgrade half of the split test) refers to by name.
sql """
CREATE STORAGE VAULT IF NOT EXISTS create_hdfs_vault
PROPERTIES (
"type"="hdfs",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "default_vault_ssb_hdfs_vault",
"hadoop.username" = "hadoop"
);
"""

// Best-effort cleanup so the CREATE TABLE below starts from a clean slate.
try_sql """ DROP TABLE IF EXISTS create_table_use_vault FORCE; """

// Positive: a table can be bound to the vault via storage_vault_name.
sql """
CREATE TABLE IF NOT EXISTS create_table_use_vault (
C_CUSTKEY INTEGER NOT NULL,
C_NAME INTEGER NOT NULL
)
DUPLICATE KEY(C_CUSTKEY, C_NAME)
DISTRIBUTED BY HASH(C_CUSTKEY) BUCKETS 1
PROPERTIES (
"replication_num" = "1",
"storage_vault_name" = "create_hdfs_vault"
)
"""

// NOTE(review): the sql helper returns a result set; assigning to String
// relies on Groovy's toString coercion, so contains() matches against the
// stringified rows — confirm this is the framework's intended idiom.
String create_stmt = sql """ SHOW CREATE TABLE create_table_use_vault """

logger.info("the create table stmt is ${create_stmt}")
// The vault binding must survive into SHOW CREATE TABLE output.
assertTrue(create_stmt.contains("create_hdfs_vault"))

// Negative: recreating the same vault without IF NOT EXISTS must fail.
expectExceptionLike({
sql """
CREATE STORAGE VAULT create_hdfs_vault
PROPERTIES (
"type"="hdfs",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "default_vault_ssb_hdfs_vault"
);
"""
}, "already created")


// Positive: create an S3-backed vault using the suite's configured
// endpoint/credentials.
sql """
CREATE STORAGE VAULT IF NOT EXISTS create_s3_vault
PROPERTIES (
"type"="S3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "test_create_s3_vault",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}"
);
"""

// Negative: duplicate S3 vault creation without IF NOT EXISTS must fail.
expectExceptionLike({
sql """
CREATE STORAGE VAULT create_s3_vault
PROPERTIES (
"type"="S3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "test_create_s3_vault",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}"
);
"""
}, "already created")

// The blocks below were deliberately disabled when this suite was split for
// the upgrade test; presumably they now live in the post-upgrade
// verification half — confirm before deleting or re-enabling.
// sql """
// CREATE TABLE IF NOT EXISTS create_table_use_s3_vault (
// C_CUSTKEY INTEGER NOT NULL,
// C_NAME INTEGER NOT NULL
// )
// DUPLICATE KEY(C_CUSTKEY, C_NAME)
// DISTRIBUTED BY HASH(C_CUSTKEY) BUCKETS 1
// PROPERTIES (
// "replication_num" = "1",
// "storage_vault_name" = "create_s3_vault"
// )
// """

// sql """ insert into create_table_use_s3_vault values(1,1); """

// sql """ select * from create_table_use_s3_vault; """


// def vaults_info = try_sql """ show storage vault """


// boolean create_hdfs_vault_exist = false;
// boolean create_s3_vault_exist = false;
// boolean built_in_storage_vault_exist = false;
// for (int i = 0; i < vaults_info.size(); i++) {
// def name = vaults_info[i][0]
// if (name.equals("create_hdfs_vault")) {
// create_hdfs_vault_exist = true;
// }
// if (name.equals("create_s3_vault")) {
// create_s3_vault_exist = true;
// }
// if (name.equals("built_in_storage_vault")) {
// built_in_storage_vault_exist = true
// }
// }
// assertTrue(create_hdfs_vault_exist)
// assertTrue(create_s3_vault_exist)
// assertTrue(built_in_storage_vault_exist)

// Negative: the built-in vault pre-exists on cloud clusters, so an explicit
// CREATE with its reserved name must report it as already created.
expectExceptionLike({
sql """
CREATE STORAGE VAULT built_in_storage_vault
PROPERTIES (
"type"="S3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "test_built_in_storage_vault",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}"
);
"""
}, "already created")


// expectExceptionLike({
// sql """
// CREATE TABLE IF NOT EXISTS create_table_with_not_exist_vault (
// C_CUSTKEY INTEGER NOT NULL,
// C_NAME INTEGER NOT NULL
// )
// DUPLICATE KEY(C_CUSTKEY, C_NAME)
// DISTRIBUTED BY HASH(C_CUSTKEY) BUCKETS 1
// PROPERTIES (
// "replication_num" = "1",
// "storage_vault_name" = "not_exist_vault"
// )
// """
// }, "Storage vault 'not_exist_vault' does not exist")
}
135 changes: 0 additions & 135 deletions regression-test/suites/vault_p0/create/test_create_vault.groovy
Original file line number Diff line number Diff line change
Expand Up @@ -26,109 +26,6 @@ suite("test_create_vault", "nonConcurrent") {
return
}

expectExceptionLike({
sql """
CREATE STORAGE VAULT IF NOT EXISTS failed_vault
PROPERTIES (
"type"="S3",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "ssb_sf1_p2",
"hadoop.username" = "hadoop"
);
"""
}, "Missing [s3.endpoint] in properties")

expectExceptionLike({
sql """
CREATE STORAGE VAULT IF NOT EXISTS failed_vault
PROPERTIES (
"type"="hdfs",
"s3.bucket"="${getHmsHdfsFs()}",
"path_prefix" = "ssb_sf1_p2",
"hadoop.username" = "hadoop"
);
"""
}, "invalid fs_name")

expectExceptionLike({
sql """ CREATE STORAGE VAULT IF NOT EXISTS failed_vault PROPERTIES (); """
}, "mismatched input ')'")


sql """
CREATE STORAGE VAULT IF NOT EXISTS create_hdfs_vault
PROPERTIES (
"type"="hdfs",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "default_vault_ssb_hdfs_vault",
"hadoop.username" = "hadoop"
);
"""

try_sql """ DROP TABLE IF EXISTS create_table_use_vault FORCE; """

sql """
CREATE TABLE IF NOT EXISTS create_table_use_vault (
C_CUSTKEY INTEGER NOT NULL,
C_NAME INTEGER NOT NULL
)
DUPLICATE KEY(C_CUSTKEY, C_NAME)
DISTRIBUTED BY HASH(C_CUSTKEY) BUCKETS 1
PROPERTIES (
"replication_num" = "1",
"storage_vault_name" = "create_hdfs_vault"
)
"""

String create_stmt = sql """ SHOW CREATE TABLE create_table_use_vault """

logger.info("the create table stmt is ${create_stmt}")
assertTrue(create_stmt.contains("create_hdfs_vault"))

expectExceptionLike({
sql """
CREATE STORAGE VAULT create_hdfs_vault
PROPERTIES (
"type"="hdfs",
"fs.defaultFS"="${getHmsHdfsFs()}",
"path_prefix" = "default_vault_ssb_hdfs_vault"
);
"""
}, "already created")


sql """
CREATE STORAGE VAULT IF NOT EXISTS create_s3_vault
PROPERTIES (
"type"="S3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "test_create_s3_vault",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}"
);
"""

expectExceptionLike({
sql """
CREATE STORAGE VAULT create_s3_vault
PROPERTIES (
"type"="S3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "test_create_s3_vault",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}"
);
"""
}, "already created")

sql """
CREATE TABLE IF NOT EXISTS create_table_use_s3_vault (
C_CUSTKEY INTEGER NOT NULL,
Expand Down Expand Up @@ -169,36 +66,4 @@ suite("test_create_vault", "nonConcurrent") {
assertTrue(create_s3_vault_exist)
assertTrue(built_in_storage_vault_exist)

expectExceptionLike({
sql """
CREATE STORAGE VAULT built_in_storage_vault
PROPERTIES (
"type"="S3",
"s3.endpoint"="${getS3Endpoint()}",
"s3.region" = "${getS3Region()}",
"s3.access_key" = "${getS3AK()}",
"s3.secret_key" = "${getS3SK()}",
"s3.root.path" = "test_built_in_storage_vault",
"s3.bucket" = "${getS3BucketName()}",
"s3.external_endpoint" = "",
"provider" = "${getS3Provider()}"
);
"""
}, "already created")


expectExceptionLike({
sql """
CREATE TABLE IF NOT EXISTS create_table_with_not_exist_vault (
C_CUSTKEY INTEGER NOT NULL,
C_NAME INTEGER NOT NULL
)
DUPLICATE KEY(C_CUSTKEY, C_NAME)
DISTRIBUTED BY HASH(C_CUSTKEY) BUCKETS 1
PROPERTIES (
"replication_num" = "1",
"storage_vault_name" = "not_exist_vault"
)
"""
}, "Storage vault 'not_exist_vault' does not exist")
}

0 comments on commit 6445928

Please sign in to comment.