
Commit

Fix issue with serial data handler.
cmeyer committed Dec 7, 2024
1 parent dd26c62 commit 357e031
Showing 2 changed files with 47 additions and 3 deletions.
18 changes: 15 additions & 3 deletions nion/instrumentation/Acquisition.py
@@ -2532,6 +2532,10 @@ def handle_data_available(self, packet: DataStreamEventArgs) -> None:


class SerialDataHandler(DataHandler):
"""A data handler that sends data to a list of handlers in sequence.
The data is not reshaped in any way.
"""
def __init__(self) -> None:
super().__init__()
self.__data_handlers = list[DataHandler]()
@@ -2543,13 +2547,17 @@ def add_data_handler(self, data_handler: DataHandler, count: int) -> None:
self.__counts.append(count)

def handle_data_available(self, packet: DataStreamEventArgs) -> None:
# send the data to the appropriate data handler.
# the data handler will be determined by the index.
# update the indexes for handling the next packet.
index = self.__indexes.get(packet.channel, 0)
current_index = index
for i, count in enumerate(self.__counts):
if index < count:
if current_index < count:
self.__data_handlers[i].handle_data_available(packet)
self.__indexes[packet.channel] = (index + (packet.count or 1)) % sum(self.__counts)
break
index -= count
current_index -= count


class CollectionDataHandler(DataHandler):
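Note (not part of the commit): the following is a minimal, self-contained sketch of the counts-based routing that the fixed SerialDataHandler.handle_data_available performs. The names Packet, SerialRouter, route, and make_counter are invented for illustration and are not part of the Acquisition API. The point of the fix is that the stored per-channel index advances by the packet size, while a separate running value (current_index) is consumed when walking the per-handler counts.

    from dataclasses import dataclass
    from typing import Callable

    @dataclass
    class Packet:
        channel: str
        count: int  # number of items carried by this packet

    class SerialRouter:
        def __init__(self) -> None:
            self.__handlers: list[Callable[[Packet], None]] = []
            self.__counts: list[int] = []
            self.__indexes: dict[str, int] = {}

        def add_handler(self, handler: Callable[[Packet], None], count: int) -> None:
            self.__handlers.append(handler)
            self.__counts.append(count)

        def route(self, packet: Packet) -> None:
            index = self.__indexes.get(packet.channel, 0)
            current_index = index  # consumed while walking the counts; stored index stays intact
            for handler, count in zip(self.__handlers, self.__counts):
                if current_index < count:
                    handler(packet)
                    # advance by the packet size, wrapping around the total of all counts
                    self.__indexes[packet.channel] = (index + packet.count) % sum(self.__counts)
                    break
                current_index -= count

    def make_counter(totals: list[int], slot: int) -> Callable[[Packet], None]:
        def handle(packet: Packet) -> None:
            totals[slot] += packet.count
        return handle

    # route 4 packets of 2 items each to two handlers with counts 4 and 4
    totals = [0, 0]
    router = SerialRouter()
    router.add_handler(make_counter(totals, 0), 4)
    router.add_handler(make_counter(totals, 1), 4)
    for _ in range(4):
        router.route(Packet("0", 2))
    assert totals == [4, 4]  # first 4 items go to the first handler, next 4 to the second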
@@ -2859,6 +2867,10 @@ def __send_data_multiple(self, channel: Channel, data_and_metadata: DataAndMetad


class StackedDataHandler(DataHandler):
"""A data handler that stacks count sections into new data with a new index of size height.
Send 4x10x10 then 2x10x10 to get a 6x10x10 output.
"""
def __init__(self, count: int, height: int) -> None:
super().__init__()
self.__count = count
@@ -2872,7 +2884,7 @@ def handle_data_available(self, data_stream_event: DataStreamEventArgs) -> None:
state = DataStreamStateEnum.PARTIAL

if data_stream_event.state == DataStreamStateEnum.COMPLETE:
self.__index = (self.__indexes.get(data_stream_event.channel, 0) + 1) % self.__count
self.__indexes[data_stream_event.channel] = (self.__indexes.get(data_stream_event.channel, 0) + 1) % self.__count

# print(f"{data_stream_event.state=} {self.__index=}/{self.__count=}")

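Note (not part of the commit): the stacking described in the StackedDataHandler docstring amounts to joining the incoming sections along the leading index so that its final size equals height. A rough illustration with plain numpy arrays (the handler itself operates on DataAndMetadata packets, not bare arrays):

    import numpy

    section_a = numpy.zeros((4, 10, 10))  # first section: 4 frames of 10x10
    section_b = numpy.ones((2, 10, 10))   # second section: 2 frames of 10x10
    stacked = numpy.concatenate([section_a, section_b], axis=0)
    assert stacked.shape == (6, 10, 10)   # the leading index now has size height = 6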
32 changes: 32 additions & 0 deletions nion/instrumentation/test/Acquisition_test.py
@@ -830,3 +830,35 @@ def handle_error(e: Exception) -> None:
self.assertTrue(had_error)
self.assertTrue(maker.is_error)
self.assertAlmostEqual(1/8, collector.progress)

def test_serial_handler(self) -> None:

class CountingDataHandler(Acquisition.DataHandler):
def __init__(self) -> None:
super().__init__()
self.count = 0

def handle_data_available(self, packet: Acquisition.DataStreamEventArgs) -> None:
self.count += packet.count or 1

counter1 = CountingDataHandler()
counter2 = CountingDataHandler()

serial_handler = Acquisition.SerialDataHandler()
serial_handler.add_data_handler(counter1, 4)
serial_handler.add_data_handler(counter2, 4)

channel = Acquisition.Channel("0")
data_descriptor = DataAndMetadata.DataDescriptor(False, 0, 1)
data_metadata = DataAndMetadata.DataMetadata(((4,), float), data_descriptor=data_descriptor)
source_data = numpy.zeros((4, 1), dtype=float)

data_stream_event = Acquisition.DataStreamEventArgs(channel, data_metadata, source_data, 2, (slice(0, 2), slice(None)), Acquisition.DataStreamStateEnum.COMPLETE)

# send the data 2 rows at a time
for _ in range(4):
serial_handler.handle_data_available(data_stream_event)

# check the counts to ensure the data is sent to both handlers
self.assertEqual(4, counter1.count)
self.assertEqual(4, counter2.count)

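Note on the expected counts in this test: each call delivers a packet carrying 2 items, so the four calls supply 8 items in total. With handler counts of 4 and 4, the per-channel index advances 0, 2, 4, 6, so the first two packets (4 items) are routed to counter1 and the next two (4 items) to counter2, after which the index wraps back to 0.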