
Commit

Merge pull request #1488 from papr/fix_pupil_pos_by_id
pupil_positions_by_id: Correctly zip topics, datum, and timestamps
papr authored Apr 29, 2019
2 parents 660ce6d + 5f55c2f commit 6fcda8d
Showing 1 changed file with 24 additions and 16 deletions.
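
For context, the change replaces zipping of independently stored topic, datum, and timestamp sequences with lookups keyed by timestamp, so a topic can no longer be paired with the wrong datum. Below is a minimal standalone sketch of that idea, not code from this repository; the variable names and toy values are illustrative.

import collections

# Two stores keyed by timestamp, as kept by Offline_Pupil_Detection
# (toy values; in the plugin the datum is a Serialized_Dict).
pupil_positions = collections.OrderedDict()  # timestamp -> pupil datum
id_topics = collections.OrderedDict()        # timestamp -> topic, e.g. "pupil.0"

pupil_positions[101.50] = {"id": 0, "timestamp": 101.50}
pupil_positions[101.51] = {"id": 1, "timestamp": 101.51}
id_topics[101.50] = "pupil.0"
id_topics[101.51] = "pupil.1"

# Pre-fix idea: zip independent value sequences and rely on their ordering to line up.
# Post-fix idea: look the topic up by the datum's own timestamp, so the pairing is explicit.
topic_data_ts = ((id_topics[ts], ts, datum) for ts, datum in pupil_positions.items())
for topic, timestamp, datum in topic_data_ts:
    assert int(topic[-1]) == datum["id"]  # eye id encoded in the topic must match the datum
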
40 changes: 24 additions & 16 deletions pupil_src/shared_modules/pupil_producers.py
@@ -215,13 +215,19 @@ def draw_legend(self, label, width, height, scale):
             thickness=4.0 * scale,
         )

-    def create_pupil_positions_by_id(self, topics, data, timestamps):
+    def create_pupil_positions_by_id(self, ts_to_datum, ts_to_topic):
+        topic_data_ts = (
+            (ts_to_topic[ts], ts, datum) for ts, datum in ts_to_datum.items()
+        )
+        return self.create_pupil_positions_by_id_iterative(topic_data_ts)
+
+    def create_pupil_positions_by_id_iterative(self, topic_data_ts):
         id0_id1_data = collections.deque(), collections.deque()
         id0_id1_time = collections.deque(), collections.deque()

-        topic_data_ts = zip(topics, data, timestamps)
-        for topic, datum, timestamp in topic_data_ts:
+        for topic, timestamp, datum in topic_data_ts:
             eye_id = int(topic[-1])  # use topic to identify eye
+            assert eye_id == datum["id"]
             id0_id1_data[eye_id].append(datum)
             id0_id1_time[eye_id].append(timestamp)

@@ -238,8 +244,10 @@ def __init__(self, g_pool):
         g_pool.pupil_positions = pm.Bisector(
             pupil_data_file.data, pupil_data_file.timestamps
         )
-        g_pool.pupil_positions_by_id = self.create_pupil_positions_by_id(
-            pupil_data_file.topics, pupil_data_file.data, pupil_data_file.timestamps
+        g_pool.pupil_positions_by_id = self.create_pupil_positions_by_id_iterative(
+            zip(
+                pupil_data_file.topics, pupil_data_file.timestamps, pupil_data_file.data
+            )
         )

         self._pupil_changed_announcer.announce_existing()
@@ -256,7 +264,7 @@ def init_ui(self):
 class Offline_Pupil_Detection(Pupil_Producer_Base):
     """docstring for Offline_Pupil_Detection"""

-    session_data_version = 2
+    session_data_version = 3
     session_data_name = "offline_pupil"

     def __init__(self, g_pool):
@@ -356,6 +364,7 @@ def recent_events(self, events):
                 # pupil data only has one remaining frame
                 payload_serialized = next(remaining_frames)
                 pupil_datum = fm.Serialized_Dict(msgpack_bytes=payload_serialized)
+                assert int(topic[-1]) == pupil_datum["id"]
                 self.pupil_positions[pupil_datum["timestamp"]] = pupil_datum
                 self.id_topics[pupil_datum["timestamp"]] = topic
             else:
@@ -375,12 +384,11 @@ def recent_events(self, events):
             )

     def correlate_publish(self):
-        time = tuple(self.pupil_positions.keys())
-        data = tuple(self.pupil_positions.values())
-        topics = tuple(self.id_topics.values())
-        self.g_pool.pupil_positions = pm.Bisector(data, time)
+        self.g_pool.pupil_positions = pm.Bisector(
+            tuple(self.pupil_positions.values()), tuple(self.pupil_positions.keys())
+        )
         self.g_pool.pupil_positions_by_id = self.create_pupil_positions_by_id(
-            topics, data, time
+            self.pupil_positions, self.id_topics
         )

         self._pupil_changed_announcer.announce_new()
@@ -402,13 +410,12 @@ def cleanup(self):
         self.save_offline_data()

     def save_offline_data(self):
-        ts_topic_data_zip = zip(
-            self.pupil_positions.keys(),
-            self.id_topics.values(),
-            self.pupil_positions.values(),
+        topic_data_ts = (
+            (self.id_topics[ts], datum, ts)
+            for ts, datum in self.pupil_positions.items()
         )
         with fm.PLData_Writer(self.data_dir, "offline_pupil") as writer:
-            for timestamp, topic, datum in ts_topic_data_zip:
+            for topic, datum, timestamp in topic_data_ts:
                 writer.append_serialized(timestamp, topic, datum.serialized)

         session_data = {}
@@ -421,6 +428,7 @@

     def redetect(self):
         self.pupil_positions.clear()  # delete previously detected pupil positions
+        self.id_topics.clear()
         self.g_pool.pupil_positions = pm.Bisector([], [])
         self.detection_finished_flag = False
         self.detection_paused = False
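
For reference, a standalone sketch of how the new iterative helper groups pupil data per eye; split_by_eye and the toy records below are illustrative stand-ins, not part of pupil_producers.py.

import collections

def split_by_eye(topic_data_ts):
    # Mirror of the per-eye split: the trailing digit of the topic
    # ("pupil.0" / "pupil.1") selects the deque for the datum and its timestamp.
    id0_id1_data = collections.deque(), collections.deque()
    id0_id1_time = collections.deque(), collections.deque()
    for topic, timestamp, datum in topic_data_ts:
        eye_id = int(topic[-1])
        assert eye_id == datum["id"]  # the invariant the commit now asserts
        id0_id1_data[eye_id].append(datum)
        id0_id1_time[eye_id].append(timestamp)
    return id0_id1_data, id0_id1_time

# Toy input in (topic, timestamp, datum) order, matching the new helper's expectation.
demo = [
    ("pupil.0", 10.00, {"id": 0}),
    ("pupil.1", 10.01, {"id": 1}),
    ("pupil.0", 10.02, {"id": 0}),
]
data, times = split_by_eye(demo)
print(len(data[0]), len(data[1]))  # 2 1
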
