Skip to content

Commit

Permalink
Script to pull unique run logs from ABR robots
Browse files Browse the repository at this point in the history
  • Loading branch information
rclarke0 committed Feb 27, 2024
1 parent c142da1 commit a3dff9c
Show file tree
Hide file tree
Showing 3 changed files with 149 additions and 0 deletions.
Empty file.
15 changes: 15 additions & 0 deletions hardware-testing/hardware_testing/abr_tools/abr_robots.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
"""ABR Robot IPs."""

ABR_IPS = [
"10.14.12.159",
"10.14.12.161",
"10.14.12.126",
"10.14.12.112",
"10.14.12.124",
"10.14.12.163",
"10.14.12.162",
"10.14.12.165",
"10.14.12.164",
"10.14.12.168",
"10.14.12.167",
]
134 changes: 134 additions & 0 deletions hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
"""ABR Run Log Pull."""
from .abr_robots import ABR_IPS
import argparse
import os
import json
import traceback
import requests
from typing import Set, Dict, Any


def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
    """Collect the run ID of every run log already in long-term storage.

    Creates ``storage_directory`` if it does not exist. Files that are not
    valid JSON, or that parse but lack a ``"run_id"`` field, are reported and
    skipped instead of aborting the scan.

    :param storage_directory: Directory holding previously saved run logs.
    :return: Set of run IDs found in storage.
    """
    os.makedirs(storage_directory, exist_ok=True)
    run_ids: Set[str] = set()
    for this_file in os.listdir(storage_directory):
        read_file = os.path.join(storage_directory, this_file)
        try:
            # with-block closes the handle deterministically; the original
            # `json.load(open(...))` leaked it.
            with open(read_file) as file_handle:
                file_results = json.load(file_handle)
        except json.JSONDecodeError:
            print(f"Ignoring unparsable file {read_file}.")
            continue
        run_id = file_results.get("run_id")
        if run_id is None:
            # A parsable JSON file without a run_id is not a run log we
            # wrote; skip it rather than crash with KeyError.
            print(f"Ignoring file without run_id: {read_file}.")
            continue
        run_ids.add(run_id)
    return run_ids


def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
    """Return the run IDs present in *runs* but absent from storage."""
    return runs.difference(runs_from_storage)


def get_run_ids_from_robot(ip: str) -> Set[str]:
    """Get the IDs of all completed runs on one robot.

    Queries the robot-server ``/runs`` endpoint on port 31950 and collects the
    id of every run whose ``current`` flag is false (i.e. not the in-progress
    run).

    :param ip: IP address of the robot.
    :return: Set of completed run IDs.
    """
    run_ids: Set[str] = set()
    response = requests.get(
        f"http://{ip}:31950/runs",
        headers={"opentrons-version": "3"},
        # Without a timeout an unreachable robot would hang the whole pull;
        # the caller already catches and reports per-robot failures.
        timeout=30,
    )
    run_data = response.json()
    for run in run_data["data"]:
        if not run["current"]:
            run_ids.add(run["id"])
    return run_ids


def get_run_data(one_run: Any, ip: str) -> Dict[str, Any]:
    """Assemble the full log for a single run from one robot.

    Combines into one dict: the run's complete command list (paged through
    ``/runs/{id}/commands``), the run metadata, the protocol record the run
    was created from, and identifying info from the robot's ``/health``
    endpoint.

    :param one_run: Run ID to fetch.
    :param ip: IP address of the robot.
    :return: Dict with keys ``commands``, ``protocol``, ``robot_name``,
        ``run_id``, ``robot_serial``, plus every field of the run metadata.
    """
    headers = {"opentrons-version": "3"}
    base_url = f"http://{ip}:31950"
    # First request uses pageLength 0 only to learn the total command count.
    response = requests.get(
        f"{base_url}/runs/{one_run}/commands",
        headers=headers,
        params={"cursor": 0, "pageLength": 0},
        timeout=30,
    )
    command_count = response.json()["meta"]["totalLength"]
    page_length = 100
    commands = list()
    run: Dict[str, Any] = dict()
    # Page through the command log page_length commands at a time.
    for cursor in range(0, command_count, page_length):
        response = requests.get(
            f"{base_url}/runs/{one_run}/commands",
            headers=headers,
            params={"cursor": cursor, "pageLength": page_length},
            timeout=30,
        )
        commands.extend(response.json()["data"])
    run["commands"] = commands
    # Run metadata (includes protocolId, used to fetch the protocol record).
    response = requests.get(
        f"{base_url}/runs/{one_run}", headers=headers, timeout=30
    )
    run_meta_data = response.json()["data"]
    protocol_id = run_meta_data["protocolId"]
    run.update(run_meta_data)
    response = requests.get(
        f"{base_url}/protocols/{protocol_id}", headers=headers, timeout=30
    )
    run["protocol"] = response.json()["data"]
    # Robot identity, so stored logs can be attributed to a machine.
    response = requests.get(f"{base_url}/health", headers=headers, timeout=30)
    health_data = response.json()
    run["robot_name"] = health_data["name"]
    run["run_id"] = one_run
    # Some robot-server responses omit the serial; the original used a bare
    # `except:` here — only a missing key is expected, so use .get().
    run["robot_serial"] = health_data.get("robot_serial", "unknown")
    return run


def save_runs(runs_to_save: Set[str], ip: str, storage_directory: str) -> None:
    """Download each listed run from the robot at *ip* and write it to storage.

    Each run is saved as ``<robot_name>_<run_id>.json`` inside
    ``storage_directory``.

    :param runs_to_save: Run IDs not yet present in storage.
    :param ip: IP address of the robot to pull from.
    :param storage_directory: Long-term storage directory.
    """
    saved = 0
    robot_name = ""
    for a_run in runs_to_save:
        data = get_run_data(a_run, ip)
        robot_name = data["robot_name"]
        data_file_name = data["robot_name"] + "_" + data["run_id"] + ".json"
        # with-block closes the output file; the original leaked the handle.
        with open(os.path.join(storage_directory, data_file_name), mode="w") as f:
            json.dump(data, f)
        saved += 1
    # Only report when something was saved: with an empty set the original
    # referenced robot_name before assignment and raised NameError.
    if saved:
        print(
            f"Saved {saved} run(s) from robot {robot_name} with IP address {ip}."
        )


def get_all_run_logs(storage_directory: str) -> None:
    """Connect to each ABR robot to read run log data.

    Read each robot's list of unique run log IDs and compare them to all IDs
    for all of the runs in storage. Any ID that is not in storage, download
    the run log and put it in storage.

    :param storage_directory: Long-term storage directory for run logs.
    """
    runs_from_storage = get_run_ids_from_storage(storage_directory)
    for ip in ABR_IPS:
        try:
            runs = get_run_ids_from_robot(ip)
            runs_to_save = get_unseen_run_ids(runs, runs_from_storage)
            save_runs(runs_to_save, ip, storage_directory)
        except Exception:
            # Report and continue: one unreachable robot should not abort
            # collection from the rest of the fleet.
            print(f"Failed to read IP address: {ip}.")
            traceback.print_exc()


if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Pulls run logs from ABR robots.")
parser.add_argument(
"storage_directory",
metavar="STORAGE_DIRECTORY",
type=str,
nargs=1,
help="Path to long term storage directory for run logs.",
)
args = parser.parse_args()
get_all_run_logs(args.storage_directory[0])

0 comments on commit a3dff9c

Please sign in to comment.