diff --git a/src/azure_dfs_filesystem.cpp b/src/azure_dfs_filesystem.cpp index 0f95626..27966e3 100644 --- a/src/azure_dfs_filesystem.cpp +++ b/src/azure_dfs_filesystem.cpp @@ -24,7 +24,7 @@ const string AzureDfsStorageFileSystem::UNSECURE_SCHEME = "abfs"; const string AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX = "abfs://"; inline static bool IsDfsScheme(const string &fpath) { - return fpath.rfind("abfss://", 0) == 0; + return fpath.rfind(AzureDfsStorageFileSystem::PATH_PREFIX, 0) == 0 || fpath.rfind(AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX, 0) == 0; } static void Walk(const Azure::Storage::Files::DataLake::DataLakeFileSystemClient &fs, const std::string &path, diff --git a/src/azure_parsed_url.cpp b/src/azure_parsed_url.cpp index b5ad702..ccc013a 100644 --- a/src/azure_parsed_url.cpp +++ b/src/azure_parsed_url.cpp @@ -13,7 +13,7 @@ AzureParsedUrl ParseUrl(const std::string &url) { std::string container, storage_account_name, endpoint, prefix, path; if (url.rfind("azure://", 0) != 0 && url.rfind("az://", 0) != 0 && - url.rfind(AzureDfsStorageFileSystem::PATH_PREFIX, 0) != 0) { + url.rfind(AzureDfsStorageFileSystem::PATH_PREFIX, 0) != 0 && url.rfind(AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX, 0) != 0) { throw IOException("URL needs to start with azure:// or az:// or %s or %s", AzureDfsStorageFileSystem::PATH_PREFIX, AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX); diff --git a/src/azure_secret.cpp b/src/azure_secret.cpp index ba24951..e9f08f0 100644 --- a/src/azure_secret.cpp +++ b/src/azure_secret.cpp @@ -36,6 +36,7 @@ static unique_ptr CreateAzureSecretFromConfig(ClientContext &context scope.push_back("azure://"); scope.push_back("az://"); scope.push_back(AzureDfsStorageFileSystem::PATH_PREFIX); + scope.push_back(AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX); } auto result = make_uniq(scope, input.type, input.provider, input.name); @@ -61,6 +62,7 @@ static unique_ptr CreateAzureSecretFromCredentialChain(ClientContext 
scope.push_back("azure://"); scope.push_back("az://"); scope.push_back(AzureDfsStorageFileSystem::PATH_PREFIX); + scope.push_back(AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX); } auto result = make_uniq(scope, input.type, input.provider, input.name); @@ -85,6 +87,7 @@ static unique_ptr CreateAzureSecretFromServicePrincipal(ClientContex scope.push_back("azure://"); scope.push_back("az://"); scope.push_back(AzureDfsStorageFileSystem::PATH_PREFIX); + scope.push_back(AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX); } auto result = make_uniq(scope, input.type, input.provider, input.name); @@ -114,6 +117,7 @@ static unique_ptr CreateAzureSecretFromAccessToken(ClientContext &co scope.push_back("azure://"); scope.push_back("az://"); scope.push_back(AzureDfsStorageFileSystem::PATH_PREFIX); + scope.push_back(AzureDfsStorageFileSystem::UNSECURE_PATH_PREFIX); } auto result = make_uniq(scope, input.type, input.provider, input.name); diff --git a/src/azure_storage_account_client.cpp b/src/azure_storage_account_client.cpp index a1825f0..e0e2293 100644 --- a/src/azure_storage_account_client.cpp +++ b/src/azure_storage_account_client.cpp @@ -591,8 +591,8 @@ ConnectToDfsStorageAccount(optional_ptr opener, const std::string &p if (!azure_parsed_url.is_fully_qualified) { throw InvalidInputException( - "Cannot identified the storage account from path '%s'. To connect anonymously to a " - "storage account easier a fully qualified path has to be provided or secret must be create.", + "Cannot identify the storage account from path '%s'. 
To connect anonymously to a " + "storage account, a fully qualified path has to be provided or a secret must be created.", path); } diff --git a/test/sql/cloud/hierarchical_namespace.test b/test/sql/cloud/hierarchical_namespace.test index 71f25eb..f9e59a3 100644 --- a/test/sql/cloud/hierarchical_namespace.test +++ b/test/sql/cloud/hierarchical_namespace.test @@ -5,26 +5,35 @@ # Require statement will ensure this test is run with this extension loaded require azure -require-env AZURE_TENANT_ID - -require-env AZURE_CLIENT_ID - -require-env AZURE_CLIENT_SECRET - +#require-env AZURE_TENANT_ID +# +#require-env AZURE_CLIENT_ID +# +#require-env AZURE_CLIENT_SECRET +# require-env AZURE_STORAGE_ACCOUNT +# +#statement ok +#set allow_persistent_secrets=false +# +#statement ok +#CREATE SECRET spn ( +# TYPE AZURE, +# PROVIDER SERVICE_PRINCIPAL, +# TENANT_ID '${AZURE_TENANT_ID}', +# CLIENT_ID '${AZURE_CLIENT_ID}', +# CLIENT_SECRET '${AZURE_CLIENT_SECRET}', +# ACCOUNT_NAME '${AZURE_STORAGE_ACCOUNT}' +#); -statement ok -set allow_persistent_secrets=false statement ok -CREATE SECRET spn ( +CREATE SECRET az1 ( TYPE AZURE, - PROVIDER SERVICE_PRINCIPAL, - TENANT_ID '${AZURE_TENANT_ID}', - CLIENT_ID '${AZURE_CLIENT_ID}', - CLIENT_SECRET '${AZURE_CLIENT_SECRET}', + PROVIDER CREDENTIAL_CHAIN, + CHAIN 'cli', ACCOUNT_NAME '${AZURE_STORAGE_ACCOUNT}' -); +); # Check that with Azure ADLS GEN2 directories are not show in globs query I @@ -102,6 +111,11 @@ EXPLAIN ANALYZE SELECT count(*) FROM 'abfss://testing-private/partitioned/l_receipmonth=*7/l_shipmode=TRUCK/*.csv'; ---- analyzed_plan :.*HTTP Stats.*in\: 322\.0 KiB.*\#HEAD\: 1.*GET\: 4.*PUT\: 0.*\#POST\: 0.* +query II +EXPLAIN ANALYZE SELECT count(*) FROM 'abfs://testing-private/partitioned/l_receipmonth=*7/l_shipmode=TRUCK/*.csv'; +---- +analyzed_plan :.*HTTP Stats.*in\: 322\.0 KiB.*\#HEAD\: 1.*GET\: 4.*PUT\: 0.*\#POST\: 0.* + query II EXPLAIN ANALYZE SELECT count(*) FROM 'azure://testing-private/partitioned/l_receipmonth=*7/l_shipmode=TRUCK/*.csv';