Skip to content

Commit

Permalink
Merge pull request #182 from ADAPT/develop
Browse files Browse the repository at this point in the history
Merge to Master for next release
  • Loading branch information
Stuart Rhea authored Jun 22, 2022
2 parents 3f12539 + 19d1599 commit 4ee54e3
Show file tree
Hide file tree
Showing 6 changed files with 124 additions and 51 deletions.
11 changes: 8 additions & 3 deletions ISOv4Plugin/Mappers/Factories/TimeLogMapperFactory.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
using System.Linq;
using AgGateway.ADAPT.ApplicationDataModel.LoggedData;
using AgGateway.ADAPT.ISOv4Plugin.ISOModels;
using AgGateway.ADAPT.ISOv4Plugin.Mappers.Manufacturers;

namespace AgGateway.ADAPT.ISOv4Plugin.Mappers.Factories
{
Expand All @@ -18,6 +19,7 @@ public class TimeLogMapperFactory
private readonly TimeLogMapper _timeLogMapper;
private readonly MultiFileTimeLogMapper _multiFileTimeLogMapper;
private readonly TaskDataMapper _taskDataMapper;
private readonly IManufacturer _manufacturer;

// A wrapper class to hold together ISOTimeLog and included ISODataLogValues.
// This avoids multiple calls to ISOTimeLog.GetTimeElement() which performs xml parsing on each call.
Expand Down Expand Up @@ -49,20 +51,23 @@ public TimeLogMapperFactory(TaskDataMapper taskDataMapper)
_taskDataMapper = taskDataMapper;
_timeLogMapper = new TimeLogMapper(taskDataMapper);
_multiFileTimeLogMapper = new MultiFileTimeLogMapper(taskDataMapper);

_manufacturer = ManufacturerFactory.GetManufacturer(taskDataMapper);
}

public IEnumerable<OperationData> ImportTimeLogs(ISOTask loggedTask, int? prescriptionID)
{
var timeLogGroups = GetTimeLogGroups(loggedTask);

var opearationDats = new List<OperationData>();
var operationDatas = new List<OperationData>();
foreach (var timeLogGroup in timeLogGroups)
{
opearationDats.AddRange(timeLogGroup.Count > 1
operationDatas.AddRange(timeLogGroup.Count > 1
? _multiFileTimeLogMapper.ImportTimeLogs(loggedTask, timeLogGroup, prescriptionID)
: _timeLogMapper.ImportTimeLogs(loggedTask, timeLogGroup, prescriptionID));
}
return opearationDats;

return _manufacturer?.PostProcessOperationData(_taskDataMapper, operationDatas) ?? operationDatas;
}

public IEnumerable<ISOTimeLog> ExportTimeLogs(IEnumerable<OperationData> operationDatas, string dataPath)
Expand Down
39 changes: 39 additions & 0 deletions ISOv4Plugin/Mappers/Manufacturers/CNH.cs
Original file line number Diff line number Diff line change
@@ -1,6 +1,10 @@
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using AgGateway.ADAPT.ApplicationDataModel.Common;
using AgGateway.ADAPT.ApplicationDataModel.LoggedData;
using AgGateway.ADAPT.ApplicationDataModel.Products;
using AgGateway.ADAPT.ISOv4Plugin.ExtensionMethods;
using AgGateway.ADAPT.ISOv4Plugin.ISOModels;

namespace AgGateway.ADAPT.ISOv4Plugin.Mappers.Manufacturers
Expand Down Expand Up @@ -206,5 +210,40 @@ public string GetProductManufacturer(ISOProduct isoProduct)

return productManufacturer;
}


/// <summary>
/// CNH-specific post-processing of imported OperationData.
/// Resolves the distinct DeviceModels behind each operation's sections
/// (section -> DeviceElementConfiguration -> DeviceElement -> DeviceModel) and,
/// when exactly one model is found:
///  - "Trip Computer Data"  => reclassify the operation as DataCollection;
///  - "Vehicle Geometry"    => drop the operation from the result entirely.
/// All other operations pass through unchanged.
/// </summary>
/// <param name="taskDataMapper">Mapper providing access to the ADAPT catalog.</param>
/// <param name="operationDatas">Operations produced by the time-log import.</param>
/// <returns>The filtered/adjusted list of operations.</returns>
public IEnumerable<OperationData> PostProcessOperationData(TaskDataMapper taskDataMapper, IEnumerable<OperationData> operationDatas)
{
    var catalog = taskDataMapper.AdaptDataModel.Catalog;
    var kept = new List<OperationData>();

    foreach (var operation in operationDatas)
    {
        // Walk from each section up to its device model, discarding any link
        // that cannot be resolved in the catalog.
        var configurations = operation.GetAllSections()
            .Select(section => catalog.DeviceElementConfigurations.FirstOrDefault(c => c.Id.ReferenceId == section.DeviceConfigurationId))
            .Where(configuration => configuration != null);
        var elements = configurations
            .Select(configuration => catalog.DeviceElements.FirstOrDefault(e => e.Id.ReferenceId == configuration.DeviceElementId))
            .Where(element => element != null);
        var models = elements
            .Select(element => element.DeviceModelId)
            .Distinct()
            .Select(modelId => catalog.DeviceModels.FirstOrDefault(m => m.Id.ReferenceId == modelId))
            .Where(model => model != null)
            .ToList();

        // Only act when the operation maps to exactly one named device model.
        if (models.Count == 1 && !string.IsNullOrWhiteSpace(models[0].Description))
        {
            var description = models[0].Description.Trim();
            if (description.EqualsIgnoreCase("Vehicle Geometry"))
            {
                // Excluded from the output (presumably geometry-only logs carry
                // no agronomic data — TODO confirm with CNH format docs).
                continue;
            }
            if (description.EqualsIgnoreCase("Trip Computer Data"))
            {
                operation.OperationType = OperationTypeEnum.DataCollection;
            }
        }
        kept.Add(operation);
    }

    return kept;
}
}
}
4 changes: 4 additions & 0 deletions ISOv4Plugin/Mappers/Manufacturers/ManufacturerFactory.cs
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
using System.Collections.Generic;
using AgGateway.ADAPT.ApplicationDataModel.LoggedData;
using AgGateway.ADAPT.ApplicationDataModel.Products;
using AgGateway.ADAPT.ISOv4Plugin.ExtensionMethods;
using AgGateway.ADAPT.ISOv4Plugin.ISOModels;
Expand All @@ -11,6 +13,8 @@ internal interface IManufacturer
ProductTypeEnum? GetProductType(ISOProduct isoProduct);
CategoryEnum? GetProductCategory(ISOProduct isoProduct);
string GetProductManufacturer(ISOProduct isoProduct);

IEnumerable<OperationData> PostProcessOperationData(TaskDataMapper taskDataMapper, IEnumerable<OperationData> operationDatas);
}

internal static class ManufacturerFactory
Expand Down
110 changes: 66 additions & 44 deletions ISOv4Plugin/Mappers/MultiFileTimeLogMapper.cs
Original file line number Diff line number Diff line change
Expand Up @@ -45,75 +45,97 @@ public override IEnumerable<OperationData> ImportTimeLogs(ISOTask loggedTask, IE
protected override IEnumerable<ISOSpatialRow> ReadTimeLog(ISOTimeLog _timeLog, string _dataPath)
{
List<BinaryReaderHelper> readers = new List<BinaryReaderHelper>();

try
{
// Obtain binary readers for each time log
foreach (var timeLog in _timeLogs)
readers = CreateBinaryReaders();

// The algorithm below uses a queue for each binary file and matches records on TimeStart/Position.
// At start of each iteration a single record is read from binary file into queue.
// Records with earliest TimeStart are merged together and removed from each file queue.
while (true)
{
var reader = base.ReadTimeLog(timeLog, TaskDataPath);
if (reader != null)
// Read next record from each time log
var readersWithData = ReadNextRecords(readers);

if (readersWithData.Count == 0)
{
readers.Add(new BinaryReaderHelper
{
Enumerator = reader.GetEnumerator()
});
// No more records in each file. Stop processing.
break;
}
}

return ReadFromBinaryReaders(readers);
// Group records by TimeStart and East/North position, and then grab ones with earliest TimeStart.
// This leads to processing earliest records from any file first and keeping other records untouched.
// They will be processed in the next loop iteration along with any records read from already processed files.
var candidates = readersWithData.GroupBy(x => new { x.CurrentRecord.TimeStart, x.CurrentRecord.EastPosition, x.CurrentRecord.NorthPosition })
.OrderBy(x => x.Key.TimeStart)
.First().ToList();

// Merge data from all candidates
ISOSpatialRow result = MergeRecords(candidates);

yield return result;
}
}
finally
{
// Clean up readers
foreach (var reader in readers)
{
reader.Enumerator?.Dispose();
}
DisposeBinaryReaders(readers);
}
}

private IEnumerable<ISOSpatialRow> ReadFromBinaryReaders(List<BinaryReaderHelper> readers)
private List<BinaryReaderHelper> CreateBinaryReaders()
{
// The algorithm below uses a queue for each binary file and matches records on TimeStart/Position.
// At start of each iteration a single record is read from binary file into queue.
// Records with earliest TimeStart are merged together and removed from each file queue.
while (true)
List<BinaryReaderHelper> readers = new List<BinaryReaderHelper>();
// Obtain binary readers for each time log
foreach (var timeLog in _timeLogs)
{
// Read next record from each time log
foreach (var reader in readers)
var reader = base.ReadTimeLog(timeLog, TaskDataPath);
if (reader != null)
{
if (reader.CurrentRecord == null)
readers.Add(new BinaryReaderHelper
{
reader.CurrentRecord = reader.Enumerator.MoveNext() ? reader.Enumerator.Current : null;
}
Enumerator = reader.GetEnumerator()
});
}
}

// Only get readers which still have records;
var readersWithData = readers.Where(x => x.CurrentRecord != null).ToList();
if (readersWithData.Count == 0)
{
// No more records in each file. Stop processing.
break;
}
return readers;
}

// Group records by TimeStart and East/North position, and then grab ones with earliest TimeStart.
// This leads to processing earliest records from any file first and keeping other records untouched.
// They will be processed in the next loop iteration along with any records read from already processed files.
var candidates = readersWithData.GroupBy(x => new { x.CurrentRecord.TimeStart, x.CurrentRecord.EastPosition, x.CurrentRecord.NorthPosition })
.OrderBy(x => x.Key.TimeStart)
.First().ToList();
/// <summary>
/// Disposes the spatial-row enumerator held by each reader helper.
/// A helper's Enumerator may be null; the null-conditional call skips those.
/// </summary>
/// <param name="readers">Reader helpers created by CreateBinaryReaders.</param>
private void DisposeBinaryReaders(List<BinaryReaderHelper> readers)
{
    readers.ForEach(helper => helper.Enumerator?.Dispose());
}

// Merge data from all candidates into first record
ISOSpatialRow result = null;
foreach (var candidate in candidates)
private List<BinaryReaderHelper> ReadNextRecords(List<BinaryReaderHelper> readers)
{
foreach (var reader in readers)
{
if (reader.CurrentRecord == null)
{
result = result == null ? candidate.CurrentRecord : result.Merge(candidate.CurrentRecord);
// Clear current record to force reading next one
candidate.CurrentRecord = null;
reader.CurrentRecord = reader.Enumerator.MoveNext() ? reader.Enumerator.Current : null;
}
}

// Only return readers which still have records
return readers.Where(x => x.CurrentRecord != null).ToList();
}

yield return result;
/// <summary>
/// Folds the current records of all candidate readers into a single spatial row.
/// The first candidate's record becomes the merge target; each subsequent record
/// is folded in via ISOSpatialRow.Merge. Every candidate's CurrentRecord is set
/// to null afterwards so the next read pass advances that reader's enumerator.
/// </summary>
/// <param name="candidates">Readers whose current records share the same TimeStart/position key.</param>
/// <returns>The merged row, or null when the candidate list is empty.</returns>
private ISOSpatialRow MergeRecords(List<BinaryReaderHelper> candidates)
{
    ISOSpatialRow merged = null;
    foreach (var candidate in candidates)
    {
        var record = candidate.CurrentRecord;
        // Consume the record so the next read pass pulls a fresh one from this reader.
        candidate.CurrentRecord = null;
        merged = merged == null ? record : merged.Merge(record);
    }
    return merged;
}

protected override ISOTime GetTimeElementFromTimeLog(ISOTimeLog isoTimeLog)
Expand Down
7 changes: 5 additions & 2 deletions ISOv4Plugin/Mappers/TaskDataMapper.cs
Original file line number Diff line number Diff line change
Expand Up @@ -389,8 +389,11 @@ public ApplicationDataModel.ADM.ApplicationDataModel Import(ISO11783_TaskData ta
if (devices.Any())
{
//See explanation of MergeSingleBinsIntoBoom in DeviceElementHierarchy
bool mergeBins = true;
bool.TryParse(Properties.GetProperty(MergeSingleBinsIntoBoom), out mergeBins);
bool mergeBins;
if (Properties == null || !bool.TryParse(Properties.GetProperty(MergeSingleBinsIntoBoom), out mergeBins))
{
mergeBins = true;
}

//Load the internal objects modeling hierarchies of DETs per DVC
DeviceElementHierarchies = new DeviceElementHierarchies(devices,
Expand Down
4 changes: 2 additions & 2 deletions ISOv4Plugin/Mappers/TimeLogMapper.cs
Original file line number Diff line number Diff line change
Expand Up @@ -301,12 +301,12 @@ protected IEnumerable<OperationData> ImportTimeLog(ISOTask loggedTask, ISOTimeLo
{
try
{
if (TaskDataMapper.Properties != null)
if (TaskDataMapper.Properties == null || !bool.TryParse(TaskDataMapper.Properties.GetProperty(TaskDataMapper.SpatialRecordDeferredExecution), out useDeferredExecution))
{
//Set this property to override the default behavior of deferring execution on the spatial data
//We historically pre-iterated this data, giving certain benefits but having negative memory impacts
//Going forward the default is to defer execution
bool.TryParse(TaskDataMapper.Properties.GetProperty(TaskDataMapper.SpatialRecordDeferredExecution), out useDeferredExecution);
useDeferredExecution = true;
}

if (!useDeferredExecution)
Expand Down

0 comments on commit 4ee54e3

Please sign in to comment.