diff --git a/.github/workflows/build-report.yml b/.github/workflows/build-report.yml
new file mode 100644
index 0000000..2271363
--- /dev/null
+++ b/.github/workflows/build-report.yml
@@ -0,0 +1,50 @@
+# Copyright © 2024 Cask Data, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+# This workflow will build a Java project with Maven
+# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
+# Note: Any changes to this workflow would be used only after merging into develop
+name: Build Unit Tests Report
+
+on:
+ workflow_run:
+ workflows:
+ - Build with unit tests
+ types:
+ - completed
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+
+ if: ${{ github.event.workflow_run.conclusion != 'skipped' }}
+
+ steps:
+ # Pinned 1.0.0 version
+ - uses: marocchino/action-workflow_run-status@54b6e87d6cb552fc5f36dbe9a722a6048725917a
+
+ - name: Download artifact
+ uses: actions/download-artifact@v4
+ with:
+ github-token: ${{ secrets.GITHUB_TOKEN }}
+ run-id: ${{ github.event.workflow_run.id }}
+ path: artifacts/
+
+ - name: Surefire Report
+ # Pinned 3.5.2 version
+ uses: mikepenz/action-junit-report@16a9560bd02f11e7e3bf6b3e2ef6bba6c9d07c32
+ if: always()
+ with:
+ report_paths: '**/target/surefire-reports/TEST-*.xml'
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ detailed_summary: true
+ commit: ${{ github.event.workflow_run.head_sha }}
+ check_name: Build Test Report
\ No newline at end of file
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
new file mode 100644
index 0000000..ce71df5
--- /dev/null
+++ b/.github/workflows/main.yml
@@ -0,0 +1,59 @@
+# Copyright © 2024 Cask Data, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy of
+# the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations under
+# the License.
+
+# This workflow will build a Java project with Maven
+# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
+# Note: Any changes to this workflow would be used only after merging into develop
+name: Build with unit tests
+
+on:
+ push:
+ branches: [ develop, release/** ]
+ pull_request:
+ branches: [ develop, release/** ]
+ types: [opened, synchronize, reopened, labeled]
+
+jobs:
+ build:
+ runs-on: k8s-runner-build
+
+ # We allow builds:
+ # 1) When it's a merge into a branch
+ # 2) For PRs that are labeled as build and
+ # - It's a code change
+ # - A build label was just added
+ # A bit complex, but prevents builds when other labels are manipulated
+ if: >
+ github.event_name == 'push'
+ || (contains(github.event.pull_request.labels.*.name, 'build')
+ && (github.event.action != 'labeled' || github.event.label.name == 'build')
+ )
+ steps:
+ - uses: actions/checkout@v3
+ with:
+        ref: ${{ github.event.pull_request.head.sha || github.sha }}
+ - name: Cache
+ uses: actions/cache@v3
+ with:
+ path: ~/.m2/repository
+ key: ${{ runner.os }}-maven-${{ github.workflow }}-${{ hashFiles('**/pom.xml') }}
+ restore-keys: |
+ ${{ runner.os }}-maven-${{ github.workflow }}
+ - name: Build with Maven
+ run: mvn clean test -fae -T 2 -B -V -DcloudBuild -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=25
+ - name: Archive build artifacts
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: reports-${{ github.run_id }}
+ path: |
+ **/target/rat.txt
+ **/target/surefire-reports/*
diff --git a/pom.xml b/pom.xml
index 7614413..5841f3e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,8 +29,8 @@
true
UTF-8
- 6.1.0-SNAPSHOT
- 2.3.0-SNAPSHOT
+ 6.4.1
+ 2.6.1
1.6
2.8.0
2.3.1
@@ -398,8 +398,8 @@
1.1.0
- system:cdap-data-pipeline[6.1.0-SNAPSHOT,7.0.0-SNAPSHOT)
- system:cdap-data-streams[6.1.0-SNAPSHOT,7.0.0-SNAPSHOT)
+ system:cdap-data-pipeline[6.4.1-SNAPSHOT,7.0.0-SNAPSHOT)
+ system:cdap-data-streams[6.4.1-SNAPSHOT,7.0.0-SNAPSHOT)
diff --git a/src/main/java/io/cdap/plugin/snowflake/common/util/QueryUtil.java b/src/main/java/io/cdap/plugin/snowflake/common/util/QueryUtil.java
index 72d52c9..289ddc1 100644
--- a/src/main/java/io/cdap/plugin/snowflake/common/util/QueryUtil.java
+++ b/src/main/java/io/cdap/plugin/snowflake/common/util/QueryUtil.java
@@ -22,7 +22,7 @@
public class QueryUtil {
// Matches "limit ". Also "limit $$$$" and "limit ''" which means unlimited in Snowflake.
- private static final String LIMIT_PATTERN = "(?i)LIMIT (''|\\$\\$\\$\\$|\\d+)";
+ private static final String LIMIT_PATTERN = "(?i)LIMIT (NULL|''|\\$\\$\\$\\$|\\d+)";
private static final String LIMIT_STRING = "limit %s";
private QueryUtil() {
diff --git a/src/main/java/io/cdap/plugin/snowflake/source/batch/SnowflakeBatchSource.java b/src/main/java/io/cdap/plugin/snowflake/source/batch/SnowflakeBatchSource.java
index 53abea9..3c6c8db 100644
--- a/src/main/java/io/cdap/plugin/snowflake/source/batch/SnowflakeBatchSource.java
+++ b/src/main/java/io/cdap/plugin/snowflake/source/batch/SnowflakeBatchSource.java
@@ -87,8 +87,7 @@ public void prepareRun(BatchSourceContext context) {
@Override
public void initialize(BatchRuntimeContext context) throws Exception {
super.initialize(context);
- SnowflakeSourceAccessor snowflakeAccessor = new SnowflakeSourceAccessor(config);
- Schema schema = SchemaHelper.getSchema(snowflakeAccessor, config.getImportQuery());
+ Schema schema = SchemaHelper.getSchema(config, context.getFailureCollector());
this.transformer = new SnowflakeMapToRecordTransformer(schema);
}
diff --git a/src/test/java/io/cdap/plugin/snowflake/common/BaseSnowflakeTest.java b/src/test/java/io/cdap/plugin/snowflake/common/BaseSnowflakeTest.java
index cd79627..24b6588 100644
--- a/src/test/java/io/cdap/plugin/snowflake/common/BaseSnowflakeTest.java
+++ b/src/test/java/io/cdap/plugin/snowflake/common/BaseSnowflakeTest.java
@@ -17,6 +17,7 @@
package io.cdap.plugin.snowflake.common;
import io.cdap.cdap.etl.mock.test.HydratorTestBase;
+import io.cdap.cdap.test.TestConfiguration;
import io.cdap.plugin.snowflake.Constants;
import io.cdap.plugin.snowflake.common.client.SnowflakeAccessorTest;
import io.cdap.plugin.snowflake.source.batch.SnowflakeBatchSourceConfig;
@@ -24,6 +25,7 @@
import net.snowflake.client.jdbc.SnowflakeBasicDataSource;
import org.junit.Assume;
import org.junit.BeforeClass;
+import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.internal.AssumptionViolatedException;
import org.junit.rules.TestName;
@@ -55,6 +57,9 @@ public abstract class BaseSnowflakeTest extends HydratorTestBase {
private static final Logger LOG = LoggerFactory.getLogger(SnowflakeAccessorTest.class);
+ @ClassRule
+ public static final TestConfiguration TEST_CONFIG = new TestConfiguration("explore.enabled", false);
+
protected static final String ACCOUNT_NAME = System.getProperty("snowflake.test.account.name");
protected static final String DATABASE = System.getProperty("snowflake.test.database");
protected static final String SCHEMA = System.getProperty("snowflake.test.schema");
diff --git a/src/test/java/io/cdap/plugin/snowflake/common/util/SchemaHelperTest.java b/src/test/java/io/cdap/plugin/snowflake/common/util/SchemaHelperTest.java
index 8c887c2..63bf0af 100644
--- a/src/test/java/io/cdap/plugin/snowflake/common/util/SchemaHelperTest.java
+++ b/src/test/java/io/cdap/plugin/snowflake/common/util/SchemaHelperTest.java
@@ -65,15 +65,16 @@ public void testGetSchemaInvalidJson() {
@Test
public void testGetSchemaFromSnowflakeUnknownType() throws IOException {
+ String importQuery = "SELECT * FROM someTable";
MockFailureCollector collector = new MockFailureCollector(MOCK_STAGE);
SnowflakeSourceAccessor snowflakeAccessor = Mockito.mock(SnowflakeSourceAccessor.class);
List sample = new ArrayList<>();
sample.add(new SnowflakeFieldDescriptor("field1", -1000, false));
- Mockito.when(snowflakeAccessor.describeQuery(null)).thenReturn(sample);
+ Mockito.when(snowflakeAccessor.describeQuery(importQuery)).thenReturn(sample);
- SchemaHelper.getSchema(snowflakeAccessor, null, collector, null);
+ SchemaHelper.getSchema(snowflakeAccessor, null, collector, importQuery);
ValidationAssertions.assertValidationFailed(
collector, Collections.singletonList(SnowflakeBatchSourceConfig.PROPERTY_SCHEMA));
@@ -81,6 +82,7 @@ public void testGetSchemaFromSnowflakeUnknownType() throws IOException {
@Test
public void testGetSchemaFromSnowflake() throws IOException {
+ String importQuery = "SELECT * FROM someTable";
MockFailureCollector collector = new MockFailureCollector(MOCK_STAGE);
SnowflakeSourceAccessor snowflakeAccessor = Mockito.mock(SnowflakeSourceAccessor.class);
@@ -142,9 +144,9 @@ public void testGetSchemaFromSnowflake() throws IOException {
Schema.Field.of("field134", Schema.nullableOf(Schema.of(Schema.LogicalType.TIMESTAMP_MICROS)))
);
- Mockito.when(snowflakeAccessor.describeQuery(null)).thenReturn(sample);
+ Mockito.when(snowflakeAccessor.describeQuery(importQuery)).thenReturn(sample);
- Schema actual = SchemaHelper.getSchema(snowflakeAccessor, null, collector, null);
+ Schema actual = SchemaHelper.getSchema(snowflakeAccessor, null, collector, importQuery);
Assert.assertTrue(collector.getValidationFailures().isEmpty());
Assert.assertEquals(expected, actual);
@@ -182,4 +184,21 @@ public void testGetSchemaWhenMacroIsEnabledSchemaIsNull() {
Assert.assertNull(actual);
}
+
+ @Test
+ public void testGetSchemaManuallyUpdatedTheSchema() {
+ Schema expected = Schema.recordOf("test",
+ Schema.Field.of("test_field", Schema.nullableOf(Schema.of(Schema.Type.LONG)))
+ );
+
+ SnowflakeBatchSourceConfig mockConfig = Mockito.mock(SnowflakeBatchSourceConfig.class);
+ Mockito.when(mockConfig.canConnect()).thenReturn(false);
+ Mockito.when(mockConfig.getSchema()).thenReturn(expected.toString());
+
+ MockFailureCollector collector = new MockFailureCollector(MOCK_STAGE);
+ Schema actual = SchemaHelper.getSchema(mockConfig, collector);
+
+ Assert.assertTrue(collector.getValidationFailures().isEmpty());
+ Assert.assertEquals(expected, actual);
+ }
}
diff --git a/src/test/java/io/cdap/plugin/snowflake/source/batch/SnowflakeMapToRecordTransformerTest.java b/src/test/java/io/cdap/plugin/snowflake/source/batch/SnowflakeMapToRecordTransformerTest.java
index c68e4ea..f81799d 100644
--- a/src/test/java/io/cdap/plugin/snowflake/source/batch/SnowflakeMapToRecordTransformerTest.java
+++ b/src/test/java/io/cdap/plugin/snowflake/source/batch/SnowflakeMapToRecordTransformerTest.java
@@ -50,8 +50,8 @@ public void transform() {
row.put("COLUMN_CHARACTER", "2");
row.put("COLUMN_STRING", "text_115");
row.put("COLUMN_TEXT", "text_116");
- row.put("COLUMN_BINARY", "text_117");
- row.put("COLUMN_VARBINARY", "text_118");
+ row.put("COLUMN_BINARY", "746578745f313137");
+ row.put("COLUMN_VARBINARY", "746578745f313138");
row.put("COLUMN_BOOLEAN", "true");
row.put("COLUMN_DATE", "2019-01-01");
row.put("COLUMN_DATETIME", "2019-01-01T01:01:01+00:00");