Added validate_order method #97
base: main
Changes from 3 commits
@@ -1354,3 +1354,71 @@ def default(self, obj):
            return obj.item()
            return obj.tolist()
        return json.JSONEncoder.default(self, obj)

def validate_order(run_iterable):
    """
    Validate the order of the documents in a Bluesky Run.

    Parameters
    ----------
    run_iterable: iterable
        A Bluesky run in the form of an iterable of (name, doc) pairs.
    """
    # Assumes defaultdict and the unpack_datum_page / unpack_event_page
    # helpers are already available in this module.
    datum_cache = {}
    resource_cache = {}
    descriptor_cache = {}
    event_cache = defaultdict(list)
    last_index = 0

    for i, (name, doc) in enumerate(run_iterable):
        last_index = i

        if name == 'start': start = (i, doc)
        if name == 'stop': stop = (i, doc)
        if name == 'resource': resource_cache[doc['uid']] = (i, doc)
        if name == 'descriptor': descriptor_cache[doc['uid']] = (i, doc)
        if name == 'datum': datum_cache[doc['datum_id']] = (i, doc)
        if name == 'datum_page':
            for datum in unpack_datum_page(doc):
                datum_cache[datum['datum_id']] = (i, datum)
        if name == 'event': event_cache[doc['descriptor']].append((i, doc))
        if name == 'event_page':
            for event in unpack_event_page(doc):
                event_cache[event['descriptor']].append((i, event))

This does not address:

    # Check that the start document is the first document.
    assert start[0] == 0

    # Check that the stop document is the last document.
    assert stop[0] == last_index

    # For each stream check that events are in timestamp order.
    for descriptor_id, event_stream in event_cache.items():
        t0 = None
        for index, event in event_stream:
            t1 = event['time']
            if t0 is not None:
                assert t1 > t0
            t0 = t1

    # Check that each descriptor doc is received before the first event of
    # its stream.
    for descriptor_id, event_stream in event_cache.items():
        assert event_stream[0][0] > descriptor_cache[descriptor_id][0]

    # For each event check that referenced datum documents are received first.
    for descriptor_id, event_stream in event_cache.items():
        descriptor = descriptor_cache[descriptor_id][1]
        # External data keys are the entries in data_keys that carry an
        # 'external' field.
        external_keys = {key for key, spec in descriptor['data_keys'].items()
                         if 'external' in spec}
        for i, event in event_stream:
            # Check that the filled keys match the external keys defined in
            # the descriptor.
            assert external_keys == set(event['filled'].keys())
            for key, value in event['data'].items():
                if key in external_keys:
                    assert datum_cache[value][0] < i

    # For each datum check that the referenced resource is received first.
    for i, datum in datum_cache.values():
        assert resource_cache[datum['resource']][0] < i
Can you copy over the content from #98 here to explain what constraints this is enforcing?
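
As a rough illustration of the ordering constraints the assertions enforce, here is a minimal, hypothetical sketch of exercising the validator on a hand-built run. It is not part of this PR: the documents are synthetic and abbreviated, not schema-complete, and they carry only the fields validate_order inspects (uid, descriptor, time, data, filled, data_keys).

# Hypothetical usage sketch, not from this PR.
docs = [
    ('start', {'uid': 'start-uid', 'time': 0.0}),
    ('descriptor', {'uid': 'desc-uid', 'run_start': 'start-uid',
                    # No 'external' field in the data_keys entry, so this
                    # stream has no external keys and no datum references.
                    'data_keys': {'temperature': {'dtype': 'number',
                                                  'shape': [],
                                                  'source': 'simulated'}}}),
    ('event', {'uid': 'event-1', 'descriptor': 'desc-uid', 'seq_num': 1,
               'time': 1.0, 'data': {'temperature': 25.0},
               'timestamps': {'temperature': 1.0}, 'filled': {}}),
    ('event', {'uid': 'event-2', 'descriptor': 'desc-uid', 'seq_num': 2,
               'time': 2.0, 'data': {'temperature': 25.5},
               'timestamps': {'temperature': 2.0}, 'filled': {}}),
    ('stop', {'uid': 'stop-uid', 'run_start': 'start-uid', 'time': 3.0}),
]

# Passes: start is first, stop is last, events are in timestamp order, and
# the descriptor precedes the first event of its stream.
validate_order(docs)

# Reordering the stream, e.g. moving an event ahead of its descriptor or the
# stop document out of last place, should trip one of the assertions.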