
Commit

Redshift Junits
Shubhangi-cs committed Dec 6, 2023
1 parent 6bc17b9 commit 0cd75b0
Showing 6 changed files with 249 additions and 1 deletion.
@@ -58,6 +58,18 @@ public RedshiftConnectorConfig(String username, String password, String jdbcPlug
    this.port = port;
  }

  public void setHost(String host) {
    this.host = host;
  }

  public void setPort(@Nullable Integer port) {
    this.port = port;
  }

  public void setDatabase(String database) {
    this.database = database;
  }

  public String getDatabase() {
    return database;
  }
@@ -16,6 +16,7 @@

package io.cdap.plugin.amazon.redshift;

import com.google.common.annotations.VisibleForTesting;
import io.cdap.cdap.api.annotation.Description;
import io.cdap.cdap.api.annotation.Macro;
import io.cdap.cdap.api.annotation.Metadata;
@@ -108,6 +109,21 @@ public Map<String, String> getDBSpecificArguments() {
    return Collections.emptyMap();
  }

  public void setUseConnection(@Nullable Boolean useConnection) {
    this.useConnection = useConnection;
  }

  public void setConnection(@Nullable RedshiftConnectorConfig connection) {
    this.connection = connection;
  }

  @VisibleForTesting
  public RedshiftSourceConfig(@Nullable Boolean useConnection,
                              @Nullable RedshiftConnectorConfig connection) {
    this.useConnection = useConnection;
    this.connection = connection;
  }

  @Override
  public Integer getFetchSize() {
    Integer fetchSize = super.getFetchSize();
@@ -39,6 +39,19 @@ public void getTableNameTest() {
      CONNECTOR.getTableName("db", "schema", "table"));
  }

  @Test
  public void getRandomQuery() {
    // The random-sampling query is expected to use TABLESAMPLE BERNOULLI, with the
    // sampling percentage derived from the requested limit and the table's row count.
    Assert.assertEquals("SELECT * FROM TestData\n" +
        "TABLESAMPLE BERNOULLI (100.0 * 10 / (SELECT COUNT(*) FROM TestData))",
      CONNECTOR.getRandomQuery("TestData", 10));
  }

  @Test
  public void getDBRecordType() {
    Assert.assertEquals("class io.cdap.plugin.amazon.redshift.RedshiftDBRecord",
      CONNECTOR.getDBRecordType().toString());
  }

  /**
   * Unit tests for getTableQuery()
   */
@@ -0,0 +1,117 @@
package io.cdap.plugin.amazon.redshift;

import com.google.common.collect.Lists;
import io.cdap.cdap.api.data.schema.Schema;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.List;

@RunWith(MockitoJUnitRunner.class)
public class RedshiftSchemaReaderTest {

  @Test
  public void testGetSchema() throws SQLException {
    RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();

    ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
    Mockito.when(metadata.getColumnTypeName(1)).thenReturn("timetz");
    Mockito.when(metadata.getColumnType(1)).thenReturn(Types.TIMESTAMP);

    Schema schema = schemaReader.getSchema(metadata, 1);

    // Redshift timetz columns are expected to be read as strings.
    Assert.assertEquals(Schema.of(Schema.Type.STRING), schema);
  }

  @Test
  public void testGetSchemaWithINTType() throws SQLException {
    RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
    ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
    Mockito.when(metadata.getColumnTypeName(1)).thenReturn("INT");
    Mockito.when(metadata.getColumnType(1)).thenReturn(Types.NUMERIC);
    Schema schema = schemaReader.getSchema(metadata, 1);

    Assert.assertEquals(Schema.of(Schema.Type.INT), schema);
  }

  @Test
  public void testGetSchemaWithNumericTypeWithPrecision() throws SQLException {
    RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
    ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
    Mockito.when(metadata.getColumnTypeName(1)).thenReturn("STRING");
    Mockito.when(metadata.getColumnType(1)).thenReturn(Types.NUMERIC);
    // Numeric columns with a reported precision of 0 are expected to map to STRING.
    Mockito.when(metadata.getPrecision(1)).thenReturn(0);

    Schema schema = schemaReader.getSchema(metadata, 1);

    Assert.assertEquals(Schema.of(Schema.Type.STRING), schema);
  }

  @Test
  public void testGetSchemaWithOtherTypes() throws SQLException {
    RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();
    ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
    Mockito.when(metadata.getColumnTypeName(1)).thenReturn("BIGINT");
    Mockito.when(metadata.getColumnType(1)).thenReturn(Types.BIGINT);
    Schema schema = schemaReader.getSchema(metadata, 1);

    Assert.assertEquals(Schema.of(Schema.Type.LONG), schema);

    Mockito.when(metadata.getColumnTypeName(2)).thenReturn("timestamp");
    Mockito.when(metadata.getColumnType(2)).thenReturn(Types.TIMESTAMP);

    schema = schemaReader.getSchema(metadata, 2);

    Assert.assertEquals(Schema.of(Schema.LogicalType.DATETIME), schema);
  }

  @Test
  public void testShouldIgnoreColumn() throws SQLException {
    RedshiftSchemaReader schemaReader = new RedshiftSchemaReader("sessionID");
    ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);
    // Internal columns derived from the session ID are ignored; regular columns are not.
    Mockito.when(metadata.getColumnName(1)).thenReturn("c_sessionID");
    Assert.assertTrue(schemaReader.shouldIgnoreColumn(metadata, 1));
    Mockito.when(metadata.getColumnName(2)).thenReturn("sqn_sessionID");
    Assert.assertTrue(schemaReader.shouldIgnoreColumn(metadata, 2));
    Mockito.when(metadata.getColumnName(3)).thenReturn("columnName");
    Assert.assertFalse(schemaReader.shouldIgnoreColumn(metadata, 3));
  }

  @Test
  public void testGetSchemaFields() throws SQLException {
    RedshiftSchemaReader schemaReader = new RedshiftSchemaReader();

    ResultSet resultSet = Mockito.mock(ResultSet.class);
    ResultSetMetaData metadata = Mockito.mock(ResultSetMetaData.class);

    Mockito.when(resultSet.getMetaData()).thenReturn(metadata);

    // Mock two columns with different types
    Mockito.when(metadata.getColumnCount()).thenReturn(2);
    Mockito.when(metadata.getColumnTypeName(1)).thenReturn("INT");
    Mockito.when(metadata.getColumnType(1)).thenReturn(Types.NUMERIC);
    Mockito.when(metadata.getColumnName(1)).thenReturn("column1");

    Mockito.when(metadata.getColumnTypeName(2)).thenReturn("BIGINT");
    Mockito.when(metadata.getColumnType(2)).thenReturn(Types.BIGINT);
    Mockito.when(metadata.getColumnName(2)).thenReturn("column2");

    List<Schema.Field> expectedSchemaFields = Lists.newArrayList();
    expectedSchemaFields.add(Schema.Field.of("column1", Schema.nullableOf(Schema.of(Schema.Type.INT))));
    expectedSchemaFields.add(Schema.Field.of("column2", Schema.nullableOf(Schema.of(Schema.Type.LONG))));

    List<Schema.Field> actualSchemaFields = schemaReader.getSchemaFields(resultSet);

    Assert.assertEquals(expectedSchemaFields.get(0).getName(), actualSchemaFields.get(0).getName());
    Assert.assertEquals(expectedSchemaFields.get(1).getName(), actualSchemaFields.get(1).getName());
  }

}
@@ -0,0 +1,90 @@
package io.cdap.plugin.amazon.redshift;

import io.cdap.cdap.etl.api.batch.BatchSourceContext;
import io.cdap.plugin.common.LineageRecorder;
import io.cdap.plugin.db.SchemaReader;

import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;

import java.util.Map;

@RunWith(MockitoJUnitRunner.class)
public class RedshiftSourceTest {

  @Test
  public void testGetDBSpecificArguments() {
    RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
      "jdbcPluginName", "connectionArguments", "host", "database", 1101);
    RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
    Map<String, String> dbSpecificArguments = config.getDBSpecificArguments();
    Assert.assertEquals(0, dbSpecificArguments.size());
  }

  @Test
  public void testGetFetchSize() {
    RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
      "jdbcPluginName", "connectionArguments", "host", "database", 1101);
    RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
    Integer fetchSize = config.getFetchSize();
    // The default fetch size is expected to be 1000 rows.
    Assert.assertEquals(1000, fetchSize.intValue());
  }

  @Test
  public void testGetSchemaReader() {
    RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
      "jdbcPluginName", "connectionArguments", "host", "database", 1101);
    RedshiftSource source = new RedshiftSource(new RedshiftSource.RedshiftSourceConfig(false, connectorConfig));
    SchemaReader schemaReader = source.getSchemaReader();
    Assert.assertTrue(schemaReader instanceof RedshiftSchemaReader);
  }

  @Test
  public void testGetDBRecordType() {
    RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
      "jdbcPluginName", "connectionArguments", "host", "database", 1101);
    RedshiftSource source = new RedshiftSource(new RedshiftSource.RedshiftSourceConfig(false, connectorConfig));
    Class<? extends DBWritable> dbRecordType = source.getDBRecordType();
    Assert.assertEquals(RedshiftDBRecord.class, dbRecordType);
  }

  @Test
  public void testCreateConnectionString() {
    RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
      "jdbcPluginName", "connectionArguments", "host", "database", 1101);
    RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
    config.setConnection(new RedshiftConnectorConfig("username", "password",
      "jdbcPluginName", "connectionArguments", "host", "database", 1101));
    config.setUseConnection(false);
    config.getConnection().setHost("localhost");
    config.getConnection().setPort(5439);
    config.getConnection().setDatabase("test");

    RedshiftSource source = new RedshiftSource(config);
    // The connection string is expected to follow the jdbc:redshift://<host>:<port>/<database> pattern.
    String connectionString = source.createConnectionString();
    Assert.assertEquals("jdbc:redshift://localhost:5439/test", connectionString);
  }

  @Test
  public void testGetLineageRecorder() {
    BatchSourceContext context = Mockito.mock(BatchSourceContext.class);
    RedshiftConnectorConfig connectorConfig = new RedshiftConnectorConfig("username", "password",
      "jdbcPluginName", "connectionArguments", "host", "database", 1101);
    RedshiftSource.RedshiftSourceConfig config = new RedshiftSource.RedshiftSourceConfig(false, connectorConfig);
    config.getConnection().setHost("localhost");
    config.getConnection().setPort(5439);
    config.getConnection().setDatabase("test");
    RedshiftSource source = new RedshiftSource(config);

    LineageRecorder lineageRecorder = source.getLineageRecorder(context);
    Assert.assertNotNull(lineageRecorder);
  }
}



2 changes: 1 addition & 1 deletion amazon-redshift-plugin/widgets/Redshift-batchsource.json
@@ -135,7 +135,7 @@
  },
  {
    "widget-type": "textbox",
-   "label": "Split Column",
+   "label": "Split-By Field Name",
    "name": "splitBy"
  },
  {
