Correct HS Time Calculation and Print Statements (#15005)

# Overview

Corrects the heater-shaker (HS) on-time calculation and makes print statements more concise.

# Test Plan

Tested on ABR robots.

# Changelog

Previously, the heater-shaker temperature on-time was tied to the shaker on-time. It now references the correct command string (`heaterShaker/deactivateHeater`), and if the heater-shaker is never deactivated, the on-time is calculated from the protocol's `completedAt` timestamp (sketched below).
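For reference, here is a minimal sketch of the corrected heater on-time logic, simplified from `hs_commands` in `read_robot_logs.py`. The command dictionaries and field names mirror the run-log format, but the function itself is illustrative, not the shipped implementation, and the assumption that `heaterShaker/waitForTemperature` marks the heater as "on" is mine:

```python
from datetime import datetime
from typing import Any, Dict, List

TIMESTAMP_FMT = "%Y-%m-%dT%H:%M:%S.%f%z"


def hs_heater_on_seconds(commands: List[Dict[str, Any]], protocol_completed_at: str) -> float:
    """Sum heater-shaker heater on-time from run-log commands (illustrative sketch)."""
    temp_time = None        # when the target temperature was reached
    deactivate_time = None  # when the heater was explicitly turned off
    total_seconds = 0.0
    for command in commands:
        command_type = command["commandType"]
        if command_type == "heaterShaker/waitForTemperature":
            # Assumed start of heater "on" time: target temperature reached.
            temp_time = datetime.strptime(command["completedAt"], TIMESTAMP_FMT)
        elif command_type == "heaterShaker/deactivateHeater":
            # Keyed off the heater command, not the shaker command.
            deactivate_time = datetime.strptime(command["startedAt"], TIMESTAMP_FMT)
            if temp_time is not None and deactivate_time > temp_time:
                total_seconds += (deactivate_time - temp_time).total_seconds()
    if temp_time is not None and deactivate_time is None:
        # Heater never deactivated: fall back to the protocol's completedAt stamp.
        protocol_end = datetime.strptime(protocol_completed_at, TIMESTAMP_FMT)
        total_seconds += (protocol_end - temp_time).total_seconds()
    return total_seconds
```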
Print statements in get robot logs and abr_google_drive were also changed to make it more obvious to the user whether or not an error occurred.

# Review requests


# Risk assessment

rclarke0 authored and Carlos-fernandez committed May 20, 2024
1 parent b5255b0 commit a33b419
Showing 5 changed files with 64 additions and 40 deletions.
11 changes: 8 additions & 3 deletions abr-testing/abr_testing/automation/google_drive_tool.py
@@ -4,6 +4,7 @@
import webbrowser
import mimetypes
from oauth2client.service_account import ServiceAccountCredentials # type: ignore[import]
import googleapiclient # type: ignore[import]
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload

@@ -58,7 +59,6 @@ def list_folder(self, delete: Any = False) -> Set[str]:
break
if not file_names:
print("No folders or files found in Google Drive.")
print(f"{len(file_names)} item(s) in Google Drive")
return file_names

def delete_files(self, file_or_folder_id: str) -> None:
@@ -98,18 +98,22 @@ def upload_missing_files(self, storage_directory: str) -> None:
file for file in os.listdir(storage_directory) if file.endswith(".json")
)
missing_files = local_files_json - set(google_drive_files_json)
print(f"Missing files: {len(missing_files)}")
# Upload missing files.
uploaded_files = []
for file in missing_files:
file_path = os.path.join(storage_directory, file)
uploaded_file_id = google_drive.upload_file(self, file_path)
self.share_permissions(uploaded_file_id)
uploaded_files.append(
{"name": os.path.basename(file_path), "id": uploaded_file_id}
)
try:
self.share_permissions(uploaded_file_id)
except googleapiclient.errors.HttpError:
continue

# Fetch the updated file list after all files are uploaded
files = google_drive.list_folder(self)

file_names = [file for file in files]
for uploaded_file in uploaded_files:
this_name = uploaded_file["name"]
@@ -121,6 +125,7 @@ def upload_missing_files(self, storage_directory: str) -> None:
print(
f"File '{this_name}' was not found in the list of files after uploading."
)
print(f"{len(files)} item(s) in Google Drive")

def open_folder(self) -> Optional[str]:
"""Open folder in web browser."""
7 changes: 6 additions & 1 deletion abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -2,6 +2,7 @@
import gspread # type: ignore[import]
import socket
import httplib2
import time as t
from datetime import datetime
from oauth2client.service_account import ServiceAccountCredentials # type: ignore[import]
from typing import Dict, List, Any, Set, Tuple
@@ -71,6 +72,10 @@ def write_to_row(self, data: List) -> None:
print("UNABLE TO CONNECT TO SERVER!!, CHECK CONNECTION")
except Exception as error:
print(error.__traceback__)
except gspread.exceptions.APIError:
print("Write quotes exceeded. Waiting 30 sec before writing.")
t.sleep(30)
self.worksheet.insert_row(data, index=self.row_index)

def delete_row(self, row_index: int) -> None:
"""Delete Row from google sheet."""
@@ -94,7 +99,7 @@ def get_column(self, column_number: int) -> Set[str]:
def get_index_row(self) -> int:
"""Check for the next available row to write too."""
row_index = len(self.get_column(1))
print("Row Index: ", row_index)
print(f"Row Index: {row_index} recorded on google sheet.")
return row_index

def update_row_index(self) -> None:
34 changes: 17 additions & 17 deletions abr-testing/abr_testing/data_collection/abr_calibration_logs.py
@@ -5,6 +5,7 @@
import json
import gspread # type: ignore[import]
import sys
import time as t
from abr_testing.data_collection import read_robot_logs
from abr_testing.automation import google_drive_tool, google_sheets_tool

@@ -18,16 +19,20 @@ def check_for_duplicates(
headers: List[str],
) -> Union[List[str], None]:
"""Check google sheet for duplicates."""
t.sleep(5)
serials = google_sheet.get_column(col_1)
modify_dates = google_sheet.get_column(col_2)
# check for complete calibration.
if len(row[-1]) > 0:
for serial, modify_date in zip(serials, modify_dates):
if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
print(f"Skipped row for instrument {serial}. Already on Google Sheet.")
return None
read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
print(f"Writing calibration for: {serial}")
# Check for calibration time stamp.
if row[-1] is not None:
if len(row[-1]) > 0:
for serial, modify_date in zip(serials, modify_dates):
if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
print(
f"Skipped row for instrument {serial}. Already on Google Sheet."
)
return None
read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
print(f"Writing calibration for: {row[7]}")
return row


@@ -206,15 +211,10 @@ def upload_calibration_offsets(
if ip_or_all == "ALL":
ip_address_list = ip_file["ip_address_list"]
for ip in ip_address_list:
print(ip)
try:
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
)
upload_calibration_offsets(calibration, storage_directory)
except Exception:
print(f"ERROR: Failed to read IP address: {ip}")
continue
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip, storage_directory
)
upload_calibration_offsets(calibration, storage_directory)
else:
saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
ip_or_all, storage_directory
11 changes: 4 additions & 7 deletions abr-testing/abr_testing/data_collection/get_run_logs.py
@@ -104,13 +104,10 @@ def get_all_run_logs(storage_directory: str) -> None:
ip_address_list = ip_file["ip_address_list"]
runs_from_storage = read_robot_logs.get_run_ids_from_google_drive(google_drive)
for ip in ip_address_list:
try:
runs = get_run_ids_from_robot(ip)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage)
save_runs(runs_to_save, ip, storage_directory)
google_drive.upload_missing_files(storage_directory)
except Exception:
print(f"ERROR: Failed to read IP address: {ip}.")
runs = get_run_ids_from_robot(ip)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage)
save_runs(runs_to_save, ip, storage_directory)
google_drive.upload_missing_files(storage_directory)


if __name__ == "__main__":
41 changes: 29 additions & 12 deletions abr-testing/abr_testing/data_collection/read_robot_logs.py
@@ -12,6 +12,7 @@
import time as t
import json
import requests
import sys


def lpc_data(file_results: Dict[str, Any], protocol_info: Dict) -> List[Dict[str, Any]]:
@@ -72,9 +73,10 @@ def hs_commands(file_results: Dict[str, Any]) -> Dict[str, float]:
hs_home_count: float = 0.0
hs_speed: float = 0.0
hs_rotations: Dict[str, float] = dict()
hs_temps: Dict[str, float] = dict()
hs_temps: Dict[float, float] = dict()
temp_time = None
shake_time = None
deactivate_time = None
for command in commandData:
commandType = command["commandType"]
# Heatershaker
@@ -87,17 +89,21 @@ def hs_commands(file_results: Dict[str, Any]) -> Dict[str, float]:
# Home count
elif commandType == "heaterShaker/deactivateShaker":
hs_home_count += 1
shake_deactivate_time = datetime.strptime(
command.get("startedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
)
if shake_time is not None and shake_deactivate_time > shake_time:
shake_duration = (shake_deactivate_time - shake_time).total_seconds()
hs_rotations[hs_speed] = hs_rotations.get(hs_speed, 0.0) + (
(hs_speed * shake_duration) / 60
)
elif commandType == "heaterShaker/deactivateHeater":
deactivate_time = datetime.strptime(
command.get("startedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
)
if temp_time is not None and deactivate_time > temp_time:
temp_duration = (deactivate_time - temp_time).total_seconds()
hs_temps[hs_temp] = hs_temps.get(hs_temp, 0.0) + temp_duration
if shake_time is not None and deactivate_time > shake_time:
shake_duration = (deactivate_time - shake_time).total_seconds()
hs_rotations[hs_speed] = hs_rotations.get(hs_speed, 0.0) + (
(hs_speed * shake_duration) / 60
)
# of Rotations
elif commandType == "heaterShaker/setAndWaitForShakeSpeed":
hs_speed = command["params"]["rpm"]
@@ -111,6 +117,13 @@ def hs_commands(file_results: Dict[str, Any]) -> Dict[str, float]:
temp_time = datetime.strptime(
command.get("completedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
)
if temp_time is not None and deactivate_time is None:
# If heater shaker module is not deactivated, protocol completedAt time stamp used.
protocol_end = datetime.strptime(
file_results.get("completedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
)
temp_duration = (protocol_end - temp_time).total_seconds()
hs_temps[hs_temp] = hs_temps.get(hs_temp, 0.0) + temp_duration
hs_latch_sets = hs_latch_count / 2 # one set of open/close
hs_total_rotations = sum(hs_rotations.values())
hs_total_temp_time = sum(hs_temps.values())
@@ -254,7 +267,7 @@ def create_abr_data_sheet(
file_name_csv = file_name + ".csv"
sheet_location = os.path.join(storage_directory, file_name_csv)
if os.path.exists(sheet_location):
print(f"File {sheet_location} located. Not overwriting.")
return sheet_location
else:
with open(sheet_location, "w") as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=headers)
@@ -368,7 +381,6 @@ def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
"""Subtracts runs from storage from current runs being read."""
runs_to_save = runs - runs_from_storage
print(f"There are {str(len(runs_to_save))} new run(s) to save.")
return runs_to_save


@@ -406,7 +418,7 @@ def write_to_sheets(
google_sheet.write_header(headers)
google_sheet.update_row_index()
google_sheet.write_to_row(row_list)
t.sleep(5) # Sleep added to avoid API error.
t.sleep(5)


def get_calibration_offsets(
@@ -415,9 +427,14 @@ def get_calibration_offsets(
"""Connect to robot via ip and get calibration data."""
calibration = dict()
# Robot Information [Name, Software Version]
response = requests.get(
f"http://{ip}:31950/health", headers={"opentrons-version": "3"}
)
try:
response = requests.get(
f"http://{ip}:31950/health", headers={"opentrons-version": "3"}
)
print(f"Connected to {ip}")
except Exception:
print(f"ERROR: Failed to read IP address: {ip}")
sys.exit()
health_data = response.json()
robot_name = health_data.get("name", "")
api_version = health_data.get("api_version", "")
