diff --git a/ISOv4Plugin/Mappers/Factories/TimeLogMapperFactory.cs b/ISOv4Plugin/Mappers/Factories/TimeLogMapperFactory.cs
index fd190ab..49de20f 100644
--- a/ISOv4Plugin/Mappers/Factories/TimeLogMapperFactory.cs
+++ b/ISOv4Plugin/Mappers/Factories/TimeLogMapperFactory.cs
@@ -7,6 +7,7 @@
 using System.Linq;
 using AgGateway.ADAPT.ApplicationDataModel.LoggedData;
 using AgGateway.ADAPT.ISOv4Plugin.ISOModels;
+using AgGateway.ADAPT.ISOv4Plugin.Mappers.Manufacturers;

 namespace AgGateway.ADAPT.ISOv4Plugin.Mappers.Factories
 {
@@ -18,6 +19,7 @@ public class TimeLogMapperFactory
         private readonly TimeLogMapper _timeLogMapper;
         private readonly MultiFileTimeLogMapper _multiFileTimeLogMapper;
         private readonly TaskDataMapper _taskDataMapper;
+        private readonly IManufacturer _manufacturer;

         // A wrapper class to hold together ISOTimeLog and included ISODataLogValues.
         // This avoids multiple calls to ISOTimeLog.GetTimeElement() which performs xml parsing on each call.
@@ -49,20 +51,23 @@ public TimeLogMapperFactory(TaskDataMapper taskDataMapper)
             _taskDataMapper = taskDataMapper;
             _timeLogMapper = new TimeLogMapper(taskDataMapper);
             _multiFileTimeLogMapper = new MultiFileTimeLogMapper(taskDataMapper);
+
+            _manufacturer = ManufacturerFactory.GetManufacturer(taskDataMapper);
         }

         public IEnumerable<OperationData> ImportTimeLogs(ISOTask loggedTask, int? prescriptionID)
         {
             var timeLogGroups = GetTimeLogGroups(loggedTask);

-            var opearationDats = new List<OperationData>();
+            var operationDatas = new List<OperationData>();
             foreach (var timeLogGroup in timeLogGroups)
             {
-                opearationDats.AddRange(timeLogGroup.Count > 1
+                operationDatas.AddRange(timeLogGroup.Count > 1
                     ? _multiFileTimeLogMapper.ImportTimeLogs(loggedTask, timeLogGroup, prescriptionID)
                     : _timeLogMapper.ImportTimeLogs(loggedTask, timeLogGroup, prescriptionID));
             }
-            return opearationDats;
+
+            return _manufacturer?.PostProcessOperationData(_taskDataMapper, operationDatas) ?? operationDatas;
         }

         public IEnumerable<ISOTimeLog> ExportTimeLogs(IEnumerable<OperationData> operationDatas, string dataPath)
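ManufacturerFactory.GetManufacturer can return null when no manufacturer-specific handling applies, so the new call above is guarded with ?. and falls back to the unmodified operation data. Every IManufacturer implementation now has to supply the new PostProcessOperationData member; a minimal pass-through sketch for an implementer that needs no adjustments (hypothetical code, not part of this diff):

    // Sketch: an IManufacturer member that applies no manufacturer-specific post-processing.
    public IEnumerable<OperationData> PostProcessOperationData(TaskDataMapper taskDataMapper, IEnumerable<OperationData> operationDatas)
    {
        // Hand the imported operations back unchanged.
        return operationDatas;
    }
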
diff --git a/ISOv4Plugin/Mappers/Manufacturers/CNH.cs b/ISOv4Plugin/Mappers/Manufacturers/CNH.cs
index 393f9f4..574397b 100644
--- a/ISOv4Plugin/Mappers/Manufacturers/CNH.cs
+++ b/ISOv4Plugin/Mappers/Manufacturers/CNH.cs
@@ -1,6 +1,10 @@
 using System.Collections.Generic;
 using System.Globalization;
+using System.Linq;
+using AgGateway.ADAPT.ApplicationDataModel.Common;
+using AgGateway.ADAPT.ApplicationDataModel.LoggedData;
 using AgGateway.ADAPT.ApplicationDataModel.Products;
+using AgGateway.ADAPT.ISOv4Plugin.ExtensionMethods;
 using AgGateway.ADAPT.ISOv4Plugin.ISOModels;
 namespace AgGateway.ADAPT.ISOv4Plugin.Mappers.Manufacturers
 {
@@ -206,5 +210,40 @@ public string GetProductManufacturer(ISOProduct isoProduct)

             return productManufacturer;
         }
+
+
+        public IEnumerable<OperationData> PostProcessOperationData(TaskDataMapper taskDataMapper, IEnumerable<OperationData> operationDatas)
+        {
+            var result = new List<OperationData>();
+
+            var catalog = taskDataMapper.AdaptDataModel.Catalog;
+            foreach (var operationData in operationDatas)
+            {
+                var deviceModels = operationData.GetAllSections()
+                    .Select(x => catalog.DeviceElementConfigurations.FirstOrDefault(y => y.Id.ReferenceId == x.DeviceConfigurationId))
+                    .Where(x => x != null)
+                    .Select(x => catalog.DeviceElements.FirstOrDefault(y => y.Id.ReferenceId == x.DeviceElementId))
+                    .Where(x => x != null)
+                    .Select(x => x.DeviceModelId)
+                    .Distinct()
+                    .Select(x => catalog.DeviceModels.FirstOrDefault(y => y.Id.ReferenceId == x))
+                    .Where(x => x != null)
+                    .ToList();
+                if (deviceModels.Count == 1 && !string.IsNullOrWhiteSpace(deviceModels[0].Description))
+                {
+                    var trimmed = deviceModels[0].Description.Trim();
+                    if (trimmed.EqualsIgnoreCase("Trip Computer Data"))
+                    {
+                        operationData.OperationType = OperationTypeEnum.DataCollection;
+                    }
+                    else if (trimmed.EqualsIgnoreCase("Vehicle Geometry"))
+                    {
+                        continue;
+                    }
+                }
+                result.Add(operationData);
+            }
+            return result;
+        }
     }
 }
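In the CNH implementation above, each OperationData is traced from its sections to DeviceElementConfiguration, DeviceElement and finally DeviceModel; when exactly one device model matches, operations described as "Trip Computer Data" are kept but reclassified as DataCollection, while operations described as "Vehicle Geometry" are dropped. The comparison relies on the EqualsIgnoreCase extension pulled in via the new ISOv4Plugin.ExtensionMethods using; its definition is not part of this diff, and the sketch below only records the ordinal, case-insensitive comparison it is assumed to perform:

    using System;

    // Assumed semantics of the EqualsIgnoreCase extension referenced above (sketch, not the plugin's source).
    public static class StringExtensionsSketch
    {
        public static bool EqualsIgnoreCase(this string value, string other)
        {
            return string.Equals(value, other, StringComparison.OrdinalIgnoreCase);
        }
    }
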
diff --git a/ISOv4Plugin/Mappers/Manufacturers/ManufacturerFactory.cs b/ISOv4Plugin/Mappers/Manufacturers/ManufacturerFactory.cs
index a12d46d..025cb1f 100644
--- a/ISOv4Plugin/Mappers/Manufacturers/ManufacturerFactory.cs
+++ b/ISOv4Plugin/Mappers/Manufacturers/ManufacturerFactory.cs
@@ -1,3 +1,5 @@
+using System.Collections.Generic;
+using AgGateway.ADAPT.ApplicationDataModel.LoggedData;
 using AgGateway.ADAPT.ApplicationDataModel.Products;
 using AgGateway.ADAPT.ISOv4Plugin.ExtensionMethods;
 using AgGateway.ADAPT.ISOv4Plugin.ISOModels;
@@ -11,6 +13,8 @@ internal interface IManufacturer
         ProductTypeEnum? GetProductType(ISOProduct isoProduct);
         CategoryEnum? GetProductCategory(ISOProduct isoProduct);
         string GetProductManufacturer(ISOProduct isoProduct);
+
+        IEnumerable<OperationData> PostProcessOperationData(TaskDataMapper taskDataMapper, IEnumerable<OperationData> operationDatas);
     }

     internal static class ManufacturerFactory
diff --git a/ISOv4Plugin/Mappers/MultiFileTimeLogMapper.cs b/ISOv4Plugin/Mappers/MultiFileTimeLogMapper.cs
index 590316b..448b910 100644
--- a/ISOv4Plugin/Mappers/MultiFileTimeLogMapper.cs
+++ b/ISOv4Plugin/Mappers/MultiFileTimeLogMapper.cs
@@ -45,75 +45,97 @@ public override IEnumerable ImportTimeLogs(ISOTask loggedTask, IE
         protected override IEnumerable<ISOSpatialRow> ReadTimeLog(ISOTimeLog _timeLog, string _dataPath)
         {
             List<BinaryReaderHelper> readers = new List<BinaryReaderHelper>();
+
             try
             {
-                // Obtain binary readers for each time log
-                foreach (var timeLog in _timeLogs)
+                readers = CreateBinaryReaders();
+
+                // The algorithm below uses a queue for each binary file and matches records on TimeStart/Position.
+                // At the start of each iteration a single record is read from each binary file into its queue.
+                // Records with the earliest TimeStart are merged together and removed from each file queue.
+                while (true)
                 {
-                    var reader = base.ReadTimeLog(timeLog, TaskDataPath);
-                    if (reader != null)
+                    // Read next record from each time log
+                    var readersWithData = ReadNextRecords(readers);
+
+                    if (readersWithData.Count == 0)
                     {
-                        readers.Add(new BinaryReaderHelper
-                        {
-                            Enumerator = reader.GetEnumerator()
-                        });
+                        // No more records in each file. Stop processing.
+                        break;
                     }
-                }
-                return ReadFromBinaryReaders(readers);
+                    // Group records by TimeStart and East/North position, and then grab ones with earliest TimeStart.
+                    // This leads to processing earliest records from any file first and keeping other records untouched.
+                    // They will be processed in the next loop iteration along with any records read from already processed files.
+                    var candidates = readersWithData.GroupBy(x => new { x.CurrentRecord.TimeStart, x.CurrentRecord.EastPosition, x.CurrentRecord.NorthPosition })
+                        .OrderBy(x => x.Key.TimeStart)
+                        .First().ToList();
+
+                    // Merge data from all candidates
+                    ISOSpatialRow result = MergeRecords(candidates);
+
+                    yield return result;
+                }
             }
             finally
             {
                 // Clean up readers
-                foreach (var reader in readers)
-                {
-                    reader.Enumerator?.Dispose();
-                }
+                DisposeBinaryReaders(readers);
             }
         }

-        private IEnumerable<ISOSpatialRow> ReadFromBinaryReaders(List<BinaryReaderHelper> readers)
+        private List<BinaryReaderHelper> CreateBinaryReaders()
         {
-            // Below alogrithm is using queues for each binary file and matching records on TimeStart/Position.
-            // At start of each iteration a single record is read from binary file into queue.
-            // Records with earliest TimeStart are merged together and removed from each file queue.
-            while (true)
+            List<BinaryReaderHelper> readers = new List<BinaryReaderHelper>();
+            // Obtain binary readers for each time log
+            foreach (var timeLog in _timeLogs)
             {
-                // Read next record from each time log
-                foreach (var reader in readers)
+                var reader = base.ReadTimeLog(timeLog, TaskDataPath);
+                if (reader != null)
                 {
-                    if (reader.CurrentRecord == null)
+                    readers.Add(new BinaryReaderHelper
                     {
-                        reader.CurrentRecord = reader.Enumerator.MoveNext() ? reader.Enumerator.Current : null;
-                    }
+                        Enumerator = reader.GetEnumerator()
+                    });
                 }
-                }
+            }

-                // Only get readers which still have records;
-                var readersWithData = readers.Where(x => x.CurrentRecord != null).ToList();
-                if (readersWithData.Count == 0)
-                {
-                    // No more records in each file. Stop processing.
-                    break;
-                }
+            return readers;
+        }

-                // Group records by TimeStart and East/North position, and then grab ones with earliest TimeStart.
-                // This leads to processing earliest records from any file first and keeping other records untouched.
-                // They will be processed in the next loop iteration along with any records read from already processed files.
-                var candidates = readersWithData.GroupBy(x => new { x.CurrentRecord.TimeStart, x.CurrentRecord.EastPosition, x.CurrentRecord.NorthPosition })
-                    .OrderBy(x => x.Key.TimeStart)
-                    .First().ToList();
+        private void DisposeBinaryReaders(List<BinaryReaderHelper> readers)
+        {
+            foreach (var reader in readers)
+            {
+                reader.Enumerator?.Dispose();
+            }
+        }

-                // Merge data from all candidates into first record
-                ISOSpatialRow result = null;
-                foreach (var candidate in candidates)
+        private List<BinaryReaderHelper> ReadNextRecords(List<BinaryReaderHelper> readers)
+        {
+            foreach (var reader in readers)
+            {
+                if (reader.CurrentRecord == null)
                 {
-                    result = result == null ? candidate.CurrentRecord : result.Merge(candidate.CurrentRecord);
-                    // Clear current record to force reading next one
-                    candidate.CurrentRecord = null;
+                    reader.CurrentRecord = reader.Enumerator.MoveNext() ? reader.Enumerator.Current : null;
                 }
+            }
+
+            // Only return readers which still have records
+            return readers.Where(x => x.CurrentRecord != null).ToList();
+        }

-                yield return result;
+        private ISOSpatialRow MergeRecords(List<BinaryReaderHelper> candidates)
+        {
+            // Merge data from all candidates into first record
+            ISOSpatialRow result = null;
+            foreach (var candidate in candidates)
+            {
+                result = result == null ? candidate.CurrentRecord : result.Merge(candidate.CurrentRecord);
+                // Clear current record to force reading next one
+                candidate.CurrentRecord = null;
             }
+            return result;
         }

         protected override ISOTime GetTimeElementFromTimeLog(ISOTimeLog isoTimeLog)
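The refactored ReadTimeLog keeps the original merge strategy: every file buffers at most one record, the buffered records sharing the earliest TimeStart (and East/North position) are merged and consumed, and the remaining buffers wait for the next pass. The self-contained sketch below illustrates only that scheduling with plain timestamps; the real mapper also keys on position and merges record contents via ISOSpatialRow.Merge (illustrative names, not plugin code):

    using System;
    using System.Collections.Generic;
    using System.Linq;

    class MergeSketch
    {
        // Each "file" yields timestamps in ascending order; each pass consumes the earliest buffered value(s).
        static IEnumerable<long> MergeByEarliest(List<IEnumerator<long>> files)
        {
            var buffered = new long?[files.Count];
            while (true)
            {
                // Top up the one-record buffer of every file whose record was consumed.
                for (int i = 0; i < files.Count; i++)
                {
                    if (buffered[i] == null && files[i].MoveNext())
                    {
                        buffered[i] = files[i].Current;
                    }
                }

                var withData = Enumerable.Range(0, files.Count).Where(i => buffered[i] != null).ToList();
                if (withData.Count == 0)
                {
                    yield break; // every file is exhausted
                }

                // Consume all buffered records that share the earliest timestamp.
                long earliest = withData.Min(i => buffered[i].Value);
                foreach (int i in withData.Where(i => buffered[i].Value == earliest))
                {
                    buffered[i] = null;
                }
                yield return earliest;
            }
        }

        static void Main()
        {
            IEnumerator<long> a = new List<long> { 1, 3, 4 }.GetEnumerator();
            IEnumerator<long> b = new List<long> { 1, 2, 4 }.GetEnumerator();
            foreach (var t in MergeByEarliest(new List<IEnumerator<long>> { a, b }))
            {
                Console.WriteLine(t); // 1, 2, 3, 4
            }
        }
    }
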
diff --git a/ISOv4Plugin/Mappers/TaskDataMapper.cs b/ISOv4Plugin/Mappers/TaskDataMapper.cs
index 7f22382..34bf852 100644
--- a/ISOv4Plugin/Mappers/TaskDataMapper.cs
+++ b/ISOv4Plugin/Mappers/TaskDataMapper.cs
@@ -389,8 +389,11 @@ public ApplicationDataModel.ADM.ApplicationDataModel Import(ISO11783_TaskData ta
             if (devices.Any())
             {
                 //See explanation of MergeSingleBinsIntoBoom in DeviceElementHierarchy
-                bool mergeBins = true;
-                bool.TryParse(Properties.GetProperty(MergeSingleBinsIntoBoom), out mergeBins);
+                bool mergeBins;
+                if (Properties == null || !bool.TryParse(Properties.GetProperty(MergeSingleBinsIntoBoom), out mergeBins))
+                {
+                    mergeBins = true;
+                }

                 //Load the internal objects modeling hierarchies of DETs per DVC
                 DeviceElementHierarchies = new DeviceElementHierarchies(devices,
diff --git a/ISOv4Plugin/Mappers/TimeLogMapper.cs b/ISOv4Plugin/Mappers/TimeLogMapper.cs
index aba2c50..5b31472 100644
--- a/ISOv4Plugin/Mappers/TimeLogMapper.cs
+++ b/ISOv4Plugin/Mappers/TimeLogMapper.cs
@@ -301,12 +301,12 @@ protected IEnumerable ImportTimeLog(ISOTask loggedTask, ISOTimeLo
             try
             {
-                if (TaskDataMapper.Properties != null)
+                if (TaskDataMapper.Properties == null || !bool.TryParse(TaskDataMapper.Properties.GetProperty(TaskDataMapper.SpatialRecordDeferredExecution), out useDeferredExecution))
                 {
                     //Set this property to override the default behavior of deferring execution on the spatial data
                     //We historically pre-iterated this data, giving certain benefits but having negative memory impacts
                     //Going forward the default is to defer execution
-                    bool.TryParse(TaskDataMapper.Properties.GetProperty(TaskDataMapper.SpatialRecordDeferredExecution), out useDeferredExecution);
+                    useDeferredExecution = true;
                 }

                 if (!useDeferredExecution)
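Both property reads above follow the same pattern: bool.TryParse writes false to its out argument whenever parsing fails, so seeding a variable with a default and then calling TryParse silently replaces that default with false. The rewritten checks apply the default (mergeBins = true, useDeferredExecution = true) only when the properties collection is missing or the value does not parse. A standalone sketch of the difference (not plugin code):

    using System;

    class TryParseDefaultSketch
    {
        static void Main()
        {
            string raw = null; // e.g. the property was never set

            // Old pattern: TryParse overwrites the seeded default with false on failure.
            bool mergeBins = true;
            bool.TryParse(raw, out mergeBins);
            Console.WriteLine(mergeBins); // False - the intended default is lost

            // New pattern: keep the default unless the property parses successfully.
            if (raw == null || !bool.TryParse(raw, out mergeBins))
            {
                mergeBins = true;
            }
            Console.WriteLine(mergeBins); // True
        }
    }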