DataTypes conversion for the orderBy tag. #23

Open · wants to merge 15 commits into master

impl/src/main/java/org/jboss/arquillian/persistence/core/data/descriptor/FileSqlScriptResourceDescriptor.java
@@ -18,6 +18,7 @@
package org.jboss.arquillian.persistence.core.data.descriptor;

import org.jboss.arquillian.persistence.core.data.script.ScriptLoader;
import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;

/**
*
@@ -35,7 +36,7 @@ public FileSqlScriptResourceDescriptor(String location)
}

@Override
- public String getContent()
+ public String getContent(DBUnitConfiguration configuration)

Bartosz Majsak (Member) commented:

I think we should avoid passing objects to these methods. For instance, we have now made the SQL script descriptor (and all other classes in this hierarchy) dependent on DBUnit. I would rather pass the configuration (or the relevant part of it) during construction of the relevant object instead.

Why do we need it, by the way? To obtain the DataTypeFactory class?

Cyrill (Author) replied:

I just need the DataTypeFactory, correct.

Regards

Cyrill


Bartosz Majsak (Member) commented:

I think we should strive for something less intrusive here. I will have a look at what DBUnit has to offer for this case after work. Thanks for the great job so far!

{
return ScriptLoader.loadScript(getLocation());
}
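
The alternative raised in the thread above amounts to constructor injection. A minimal sketch of that shape, with getContent() kept parameterless; the class name and wiring are illustrative only and not part of this PR:

```java
import org.jboss.arquillian.persistence.core.data.script.ScriptLoader;
import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;

// Illustrative only: the configuration (or just the piece the descriptor really
// needs, e.g. the DataTypeFactory) is handed over at construction time, so the
// descriptor hierarchy keeps a parameterless getContent().
public class ConstructorConfiguredScriptDescriptor
{
   private final String location;
   private final DBUnitConfiguration configuration; // could be narrowed to the relevant part

   public ConstructorConfiguredScriptDescriptor(String location, DBUnitConfiguration configuration)
   {
      this.location = location;
      this.configuration = configuration;
   }

   public String getContent()
   {
      // Same loading logic as FileSqlScriptResourceDescriptor; the configuration
      // is available as a field whenever the content actually needs it.
      return ScriptLoader.loadScript(location);
   }
}
```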

InlineSqlScriptResourceDescriptor.java
@@ -17,6 +17,8 @@
*/
package org.jboss.arquillian.persistence.core.data.descriptor;

import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;

/**
*
* Inline SQL script descriptor.
@@ -36,7 +38,7 @@ public InlineSqlScriptResourceDescriptor(String content)
}

@Override
- public String getContent()
+ public String getContent(DBUnitConfiguration configuration)
{
return content;
}

ResourceDescriptor.java
@@ -17,6 +17,8 @@
*/
package org.jboss.arquillian.persistence.core.data.descriptor;

import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;

/**
* Describes resource attributes such as it's location in classpath
* and format.
@@ -35,7 +37,7 @@ public ResourceDescriptor(String location)
this.location = location;
}

- public abstract T getContent();
+ public abstract T getContent(DBUnitConfiguration configuration);

public String getLocation()
{

TextFileResourceDescriptor.java
@@ -17,6 +17,8 @@
*/
package org.jboss.arquillian.persistence.core.data.descriptor;

import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;

import java.io.InputStream;
import java.util.Scanner;

@@ -36,7 +38,7 @@ public TextFileResourceDescriptor(String location)
}

@Override
- public String getContent()
+ public String getContent(DBUnitConfiguration configuration)
{
final InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(getLocation());
return new Scanner(inputStream).useDelimiter("\\A").next();

@@ -115,7 +115,7 @@ public void cleanupUsingScript(@Observes CleanupDataUsingScript cleanupDataUsing
{
for (SqlScriptResourceDescriptor scriptDescriptor : cleanupDataUsingScriptEvent.getDescriptors())
{
- final String script = scriptDescriptor.getContent();
+ final String script = scriptDescriptor.getContent(dbunitConfigurationInstance.get());
executeScript(script);
}
}
@@ -124,7 +124,7 @@ public void executeScripts(@Observes ExecuteScripts executeScriptsEvent)
{
for (SqlScriptResourceDescriptor scriptDescriptor : executeScriptsEvent.getDescriptors())
{
- final String script = scriptDescriptor.getContent();
+ final String script = scriptDescriptor.getContent(dbunitConfigurationInstance.get());
executeScript(script);
}
}
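
For context, dbunitConfigurationInstance used above is an Arquillian-injected holder of the resolved DBUnitConfiguration, as declared in DBUnitPersistenceTestLifecycleHandler further down. A minimal sketch of that pattern; the enclosing class, method, and import locations are assumed for illustration:

```java
import org.jboss.arquillian.core.api.Instance;
import org.jboss.arquillian.core.api.annotation.Inject;
import org.jboss.arquillian.persistence.core.data.descriptor.SqlScriptResourceDescriptor;
import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;

// Illustrative class showing the Instance<> holder that backs dbunitConfigurationInstance.get().
public class ScriptContentResolver
{
   @Inject
   private Instance<DBUnitConfiguration> dbunitConfigurationInstance; // populated by Arquillian's core

   public String resolve(SqlScriptResourceDescriptor descriptor)
   {
      // get() returns the configuration bound to the current test context,
      // which is then passed down to the descriptor.
      return descriptor.getContent(dbunitConfigurationInstance.get());
   }
}
```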

DBUnitPersistenceTestLifecycleHandler.java
@@ -15,8 +15,8 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.arquillian.persistence.dbunit;

import java.lang.reflect.Method;
import java.sql.Connection;
import java.util.Collection;
@@ -48,36 +48,36 @@
import org.jboss.arquillian.test.spi.annotation.ClassScoped;
import org.jboss.arquillian.test.spi.annotation.TestScoped;
import org.jboss.arquillian.test.spi.event.suite.TestEvent;

/**
 *
 * @author Bartosz Majsak
 *
 */
public class DBUnitPersistenceTestLifecycleHandler
{

   @Inject
   private Instance<DataSource> dataSourceInstance;

   @Inject
   private Instance<MetadataExtractor> metadataExtractorInstance;

   @Inject
   private Instance<DBUnitConfiguration> dbUnitConfigurationInstance;

   @Inject @ClassScoped
   private InstanceProducer<DatabaseConnection> databaseConnectionProducer;

   @Inject @TestScoped
   private InstanceProducer<DataSetRegister> dataSetRegisterProducer;

   @Inject
   private Instance<PersistenceExtensionFeatureResolver> persistenceExtensionFeatureResolverInstance;

   // ------------------------------------------------------------------------------------------------
   // Intercepting data handling events
   // ------------------------------------------------------------------------------------------------

   public void provideDatabaseConnectionAroundBeforePersistenceTest(@Observes(precedence = 100000) EventContext<BeforePersistenceTest> context)
   {
@@ -89,7 +89,7 @@ public void provideDatabaseConnectionAroundAfterPersistenceTest(@Observes(preced
      provideDatabaseConnection(context);
   }

   public void createDatasets(@Observes(precedence = 1000) EventContext<BeforePersistenceTest> context)
   {
      final Method testMethod = context.getEvent().getTestMethod();

@@ -105,7 +105,7 @@ public void createDatasets(@Observes(precedence = 1000) EventContext<BeforePersi
         createExpectedDataSets(dataSetProvider.getDescriptorsDefinedFor(testMethod));
      }

      context.proceed();
   }

   // ------------------------------------------------------------------------------------------------
@@ -122,7 +122,7 @@ private void provideDatabaseConnection(EventContext<? extends TestEvent> context
         closeDatabaseConnection();
      }
   }

   private void createDatabaseConnection()
   {

@@ -160,7 +160,7 @@ private void createDatabaseConnection()
      {
         throw new DBUnitInitializationException("Unable to initialize database connection for DBUnit module.", e);
      }
   }

   private void closeDatabaseConnection()
   {
@@ -177,36 +177,36 @@ private void closeDatabaseConnection()
         throw new DBUnitConnectionException("Unable to close connection.", e);
      }
   }

   private void createInitialDataSets(Collection<DataSetResourceDescriptor> dataSetDescriptors)
   {
      DataSetRegister dataSetRegister = getOrCreateDataSetRegister();
      for (DataSetResourceDescriptor dataSetDescriptor : dataSetDescriptors)
      {
-        dataSetRegister.addInitial(dataSetDescriptor.getContent());
+        dataSetRegister.addInitial(dataSetDescriptor.getContent(dbUnitConfigurationInstance.get()));
      }
      dataSetRegisterProducer.set(dataSetRegister);
   }

   private void createExpectedDataSets(Collection<DataSetResourceDescriptor> dataSetDescriptors)
   {
      DataSetRegister dataSetRegister = getOrCreateDataSetRegister();
      for (DataSetResourceDescriptor dataSetDescriptor : dataSetDescriptors)
      {
-        dataSetRegister.addExpected(dataSetDescriptor.getContent());
+        dataSetRegister.addExpected(dataSetDescriptor.getContent(dbUnitConfigurationInstance.get()));
      }
      dataSetRegisterProducer.set(dataSetRegister);
   }

   private DataSetRegister getOrCreateDataSetRegister()
   {
      DataSetRegister dataSetRegister = dataSetRegisterProducer.get();
      if (dataSetRegister == null)
      {
         dataSetRegister = new DataSetRegister();
      }
      return dataSetRegister;
   }
}

@@ -21,11 +21,7 @@
import static org.jboss.arquillian.persistence.dbunit.DataSetUtils.extractColumnsNotSpecifiedInExpectedDataSet;
import static org.jboss.arquillian.persistence.dbunit.DataSetUtils.extractNonExistingColumns;

-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
import java.util.logging.Logger;

import org.dbunit.Assertion;
@@ -144,17 +140,39 @@ private List<String> defineColumnsForSorting(IDataSet currentDataSet, IDataSet e
throws DataSetException
{
final List<String> columnsForSorting = new ArrayList<String>();
- columnsForSorting.addAll(orderBy.global);
+ columnsForSorting.addAll(existingColumnsForTable(expectedDataSet, tableName, orderBy.global));
final List<String> columsPerTable = orderBy.columnsPerTable.get(tableName);
if (columsPerTable != null)
{
columnsForSorting.addAll(columsPerTable);
}
- columnsForSorting.addAll(additionalColumnsForSorting(expectedDataSet.getTable(tableName),
-       currentDataSet.getTable(tableName)));

+ // Add all columns if none are defined in orderBy
+ if (columnsForSorting.isEmpty()) {
+    columnsForSorting.addAll(additionalColumnsForSorting(expectedDataSet.getTable(tableName),
+          currentDataSet.getTable(tableName)));
+ }

return columnsForSorting;
}

private Collection<String> existingColumnsForTable(IDataSet set, String tableName, List<String> expectedColumns) throws DataSetException
{
Set<String> existingColumns = new HashSet<String>();

Collection<String> tableColumns = extractColumnNames(set.getTable(tableName).getTableMetaData().getColumns());

for (String currentColumn : expectedColumns) {
if (tableColumns.contains(currentColumn.toLowerCase())) {
existingColumns.add(currentColumn.toLowerCase());
} else {
log.warning("Column " + currentColumn.toLowerCase() + " in Table " + tableName + " was set to be an ordering column but does not exist in this table. Ignoring it. Consider using [TABLENAME.COLUMNNAME]");
}
}

return existingColumns;
}
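
For reference, the ordering columns validated here come from the orderBy attribute of the data set matching annotation. A hedged usage sketch, assuming @ShouldMatchDataSet exposes an orderBy attribute in this version and using made-up data set file names, with a global column next to the [TABLENAME.COLUMNNAME] form the warning recommends:

```java
import org.jboss.arquillian.persistence.ShouldMatchDataSet;
import org.jboss.arquillian.persistence.UsingDataSet;
import org.junit.Test;

public class UserPersistenceTest
{
   // "id" is treated as a global ordering column (applied to every table in the
   // expected data set), while "useraccount.lastname" targets only the useraccount
   // table via the [TABLENAME.COLUMNNAME] form.
   @Test
   @UsingDataSet("datasets/users.yml")                        // file name made up for the example
   @ShouldMatchDataSet(value = "datasets/expected-users.yml", // file name made up for the example
                       orderBy = {"id", "useraccount.lastname"})
   public void should_find_all_users()
   {
      // test body exercising the persistence layer omitted
   }
}
```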

private static <T> String[] toArray(final List<T> list)
{
return list.toArray(new String[list.size()]);
@@ -180,7 +198,8 @@ private List<String> additionalColumnsForSorting(final ITable expectedTableState
}
catch (DataSetException e)
{
throw new DBUnitDataSetHandlingException("Unable to resolve columns", e);
throw new DBUnitDataSetHandlingException("Unable to resolve columns in table " +
expectedTableState.getTableMetaData().getTableName(), e);
}

return columnsForSorting;
@@ -207,7 +226,7 @@ private List<String> extractColumnsToBeIgnored(final ITable expectedTableState,
if (!nonExistingColumns.isEmpty())
{
log.warning("Columns which are specified to be filtered out [" + Arrays.toString(nonExistingColumns.toArray())
+ "] are not existing in the table.");
+ "] are not existing in the table " + expectedTableState.getTableMetaData().getTableName());
}
return columnsToIgnore;
}

DataSetResourceDescriptor.java
@@ -19,6 +19,7 @@

import org.dbunit.dataset.IDataSet;
import org.jboss.arquillian.persistence.core.data.descriptor.ResourceDescriptor;
import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;
import org.jboss.arquillian.persistence.dbunit.dataset.DataSetBuilder;


@@ -41,9 +42,9 @@ public DataSetResourceDescriptor(String location, Format format)
}

@Override
- public IDataSet getContent()
+ public IDataSet getContent(DBUnitConfiguration configuration)
{
- return DataSetBuilder.builderFor(format).build(location);
+ return DataSetBuilder.builderFor(format).build(location, configuration);
}

@Override
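
A small illustration of how a descriptor ends up handing the configuration to the builder; the resource path and format are made up for the example:

```java
import org.dbunit.dataset.IDataSet;
import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;
import org.jboss.arquillian.persistence.dbunit.data.descriptor.Format;
import org.jboss.arquillian.persistence.dbunit.dataset.DataSetBuilder;

final class DataSetLoadingSketch
{
   // Mirrors DataSetResourceDescriptor.getContent(): pick a builder for the declared
   // format, then let it parse the resource with the DBUnit configuration in hand.
   static IDataSet load(DBUnitConfiguration configuration)
   {
      return DataSetBuilder.builderFor(Format.JSON).build("datasets/users.json", configuration);
   }
}
```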

DataSetBuilder.java
@@ -26,6 +26,7 @@
import org.dbunit.dataset.ReplacementDataSet;
import org.dbunit.dataset.excel.XlsDataSet;
import org.dbunit.dataset.xml.FlatXmlDataSetBuilder;
import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;
import org.jboss.arquillian.persistence.dbunit.data.descriptor.Format;
import org.jboss.arquillian.persistence.dbunit.dataset.json.JsonDataSet;
import org.jboss.arquillian.persistence.dbunit.dataset.xml.DtdResolver;
@@ -48,7 +49,7 @@ private DataSetBuilder(Format format)
this.format = format;
}

- public IDataSet build(final String file)
+ public IDataSet build(final String file, DBUnitConfiguration configuration)
{
IDataSet dataSet = null;
try
@@ -65,7 +66,7 @@ public IDataSet build(final String file)
dataSet = loadYamlDataSet(file);
break;
case JSON:
- dataSet = loadJsonDataSet(file);
+ dataSet = loadJsonDataSet(file, configuration);
break;
default:
throw new DBUnitInitializationException("Unsupported data type " + format);
@@ -120,10 +121,10 @@ private XlsDataSet loadExcelDataSet(final String file) throws IOException, DataS
return new XlsDataSet(inputStream);
}

- private JsonDataSet loadJsonDataSet(final String file) throws IOException, DataSetException
+ private JsonDataSet loadJsonDataSet(final String file, DBUnitConfiguration configuration) throws IOException, DataSetException
{
final InputStream inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream(file);
- return new JsonDataSet(inputStream);
+ return new JsonDataSet(inputStream, configuration);
}
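
The configuration is threaded down mainly so the JSON data set can honour the DataTypeFactory configured for DBUnit, which is the data type conversion this PR is named after. A rough sketch of that kind of conversion; the getDatatypeFactory() accessor on DBUnitConfiguration is assumed, and the PR's actual wiring inside JsonDataSet may differ:

```java
import java.sql.Types;

import org.dbunit.dataset.datatype.DataType;
import org.dbunit.dataset.datatype.DataTypeException;
import org.dbunit.dataset.datatype.IDataTypeFactory;
import org.jboss.arquillian.persistence.dbunit.configuration.DBUnitConfiguration;

final class DataTypeConversionSketch
{
   // Resolve a DBUnit DataType through the configured factory and coerce the raw
   // string value read from the data set file into the matching Java type.
   static Object toTimestamp(DBUnitConfiguration configuration, String rawValue) throws DataTypeException
   {
      IDataTypeFactory factory = configuration.getDatatypeFactory();       // assumed accessor
      DataType timestampType = factory.createDataType(Types.TIMESTAMP, "TIMESTAMP");
      return timestampType.typeCast(rawValue);  // e.g. "2012-12-03 11:40:00" becomes java.sql.Timestamp
   }
}
```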

private IDataSet loadYamlDataSet(final String file) throws IOException,