Skip to content

Commit

Permalink
removed class from usable module
Browse files Browse the repository at this point in the history
  • Loading branch information
rclarke0 committed Mar 19, 2024
1 parent 088a9ec commit 8b11175
Show file tree
Hide file tree
Showing 5 changed files with 134 additions and 138 deletions.
20 changes: 10 additions & 10 deletions hardware-testing/hardware_testing/abr_tools/abr_command_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import sys
import json
from datetime import datetime, timedelta
from read_robot_logs import ReadRobotLogs
from . import read_robot_logs


def set_up_data_sheet(
Expand All @@ -20,7 +20,7 @@ def set_up_data_sheet(
except FileNotFoundError:
print("No google sheets credentials. Add credentials to storage notebook.")
local_file_str = google_sheet_name + "-" + commandTypes
csv_name = ReadRobotLogs.create_abr_data_sheet(
csv_name = read_robot_logs.create_abr_data_sheet(
storage_directory, local_file_str, headers
)

Expand Down Expand Up @@ -305,7 +305,7 @@ def command_data_dictionary(
error_code,
error_instrument,
error_level,
) = ReadRobotLogs.get_error_info(file_results)
) = read_robot_logs.get_error_info(file_results)

all_pipette_commands_list = pipette_commands(file_results)
all_module_commands_list = module_commands(file_results)
Expand Down Expand Up @@ -487,33 +487,33 @@ def command_data_dictionary(
google_sheet_movement, csv_movement = set_up_data_sheet(
3, google_sheet_name, "Movement", movement_headers
)
runs_from_storage = ReadRobotLogs.get_run_ids_from_storage(storage_directory)
runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
i = 0
n = 0
m = 0
p = 0
runs_in_sheet = ReadRobotLogs.read_abr_data_sheet(
runs_in_sheet = read_robot_logs.read_abr_data_sheet(
storage_directory, csv_instruments, google_sheet_instruments
)
runs_to_save = ReadRobotLogs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
(
runs_and_instrument_commands,
runs_and_module_commands,
runs_and_setup_commands,
runs_and_move_commands,
) = command_data_dictionary(runs_to_save, storage_directory, i, m, n, p)
ReadRobotLogs.write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_instrument_commands,
storage_directory,
csv_instruments,
google_sheet_instruments,
)
ReadRobotLogs.write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_module_commands, storage_directory, csv_modules, google_sheet_modules
)
ReadRobotLogs.write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_setup_commands, storage_directory, csv_setup, google_sheet_setup
)
ReadRobotLogs.write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_move_commands, storage_directory, csv_movement, google_sheet_movement
)
14 changes: 7 additions & 7 deletions hardware-testing/hardware_testing/abr_tools/abr_read_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import json
import sys
from datetime import datetime, timedelta
from hardware_testing.abr_tools.read_robot_logs import ReadRobotLogs
from . import read_robot_logs


def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
Expand Down Expand Up @@ -53,7 +53,7 @@ def create_data_dictionary(
error_code,
error_instrument,
error_level,
) = ReadRobotLogs.get_error_info(file_results)
) = read_robot_logs.get_error_info(file_results)
all_modules = get_modules(file_results)

start_time_str, complete_time_str, start_date, run_time_min = (
Expand Down Expand Up @@ -171,15 +171,15 @@ def create_data_dictionary(
"magneticBlockV1",
"thermocyclerModuleV2",
]
runs_from_storage = ReadRobotLogs.get_run_ids_from_storage(storage_directory)
file_name_csv = ReadRobotLogs.create_abr_data_sheet(
runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
file_name_csv = read_robot_logs.create_abr_data_sheet(
storage_directory, file_name, headers
)
runs_in_sheet = ReadRobotLogs.read_abr_data_sheet(
runs_in_sheet = read_robot_logs.read_abr_data_sheet(
storage_directory, file_name_csv, google_sheet
)
runs_to_save = ReadRobotLogs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs_from_storage, runs_in_sheet)
runs_and_robots = create_data_dictionary(runs_to_save, storage_directory)
ReadRobotLogs.write_to_abr_sheet(
read_robot_logs.write_to_abr_sheet(
runs_and_robots, storage_directory, file_name_csv, google_sheet
)
6 changes: 3 additions & 3 deletions hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
import json
import traceback
import requests
from hardware_testing.abr_tools.read_robot_logs import ReadRobotLogs
from . import read_robot_logs


def get_run_ids_from_robot(ip: str) -> Set[str]:
Expand Down Expand Up @@ -95,11 +95,11 @@ def get_all_run_logs(storage_directory: str) -> None:
ip_address_list = ip_file["ip_address_list"]
print(ip_address_list)

runs_from_storage = ReadRobotLogs.get_run_ids_from_storage(storage_directory)
runs_from_storage = read_robot_logs.get_run_ids_from_storage(storage_directory)
for ip in ip_address_list:
try:
runs = get_run_ids_from_robot(ip)
runs_to_save = ReadRobotLogs.get_unseen_run_ids(runs, runs_from_storage)
runs_to_save = read_robot_logs.get_unseen_run_ids(runs, runs_from_storage)
save_runs(runs_to_save, ip, storage_directory)
except Exception:
print(f"Failed to read IP address: {ip}.")
Expand Down
4 changes: 2 additions & 2 deletions hardware-testing/hardware_testing/abr_tools/abr_scale.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from typing import Any, List
import argparse
import csv
from hardware_testing.abr_tools.read_robot_logs import ReadRobotLogs
from . import read_robot_logs


def write_to_sheets(file_name_csv: str, google_sheet: Any, row_list: List) -> None:
Expand Down Expand Up @@ -77,7 +77,7 @@ def write_to_sheets(file_name_csv: str, google_sheet: Any, row_list: List) -> No
is_stable = False
# Set up csv sheet
headers = ["Robot", "Date", "Timestamp", "Labware", "Mass (g)", "Measurement Step"]
all_data_csv = ReadRobotLogs.create_abr_data_sheet(
all_data_csv = read_robot_logs.create_abr_data_sheet(
storage_directory, file_name, headers
)
# Set up google sheet
Expand Down
228 changes: 112 additions & 116 deletions hardware-testing/hardware_testing/abr_tools/read_robot_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,126 +12,122 @@
import json


class ReadRobotLogs:
"""Functions to read robot logs and add to google sheet."""
def create_abr_data_sheet(storage_directory: str, file_name: str, headers: List) -> str:
    """Create the ABR data-log CSV if it does not already exist.

    Writes a header-only CSV named ``file_name + ".csv"`` into
    ``storage_directory``. An existing file is left untouched so that
    previously logged rows are preserved.

    Returns the CSV file name (not the full path).
    """
    file_name_csv = file_name + ".csv"
    print(file_name_csv)
    sheet_location = os.path.join(storage_directory, file_name_csv)
    if not os.path.exists(sheet_location):
        with open(sheet_location, "w") as csvfile:
            csv.DictWriter(csvfile, fieldnames=headers).writeheader()
        print(f"Created file. Located: {sheet_location}.")
    else:
        print(f"File {sheet_location} located. Not overwriting.")
    return file_name_csv

def create_abr_data_sheet(
storage_directory: str, file_name: str, headers: List
) -> str:
"""Creates csv file to log ABR data."""
file_name_csv = file_name + ".csv"
print(file_name_csv)
sheet_location = os.path.join(storage_directory, file_name_csv)
if os.path.exists(sheet_location):
print(f"File {sheet_location} located. Not overwriting.")
else:
with open(sheet_location, "w") as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=headers)
writer.writeheader()
print(f"Created file. Located: {sheet_location}.")
return file_name_csv

def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
"""Determines if errors exist in run log and documents them."""
error_levels = []
# Read error levels file
with open(ERROR_LEVELS_PATH, "r") as error_file:
error_levels = list(csv.reader(error_file))
num_of_errors = len(file_results["errors"])
if num_of_errors == 0:
error_type = ""
error_code = ""
error_instrument = ""
error_level = ""
return 0, error_type, error_code, error_instrument, error_level
commands_of_run: List[Dict[str, Any]] = file_results.get("commands", [])
run_command_error: Dict[str, Any] = commands_of_run[-1]
error_str: int = len(run_command_error.get("error", ""))
if error_str > 1:
error_type = run_command_error["error"].get("errorType", "")
error_code = run_command_error["error"].get("errorCode", "")
try:
# Instrument Error
error_instrument = run_command_error["error"]["errorInfo"]["node"]
except KeyError:
# Module Error
error_instrument = run_command_error["error"]["errorInfo"].get(
"port", ""
)
else:
error_type = file_results["errors"][0]["errorType"]
print(error_type)
error_code = file_results["errors"][0]["errorCode"]
error_instrument = file_results["errors"][0]["detail"]
for error in error_levels:
code_error = error[1]
if code_error == error_code:
error_level = error[4]
def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, str]:
    """Determines if errors exist in run log and documents them.

    Returns ``(num_of_errors, error_type, error_code, error_instrument,
    error_level)``. All four strings are empty when the run had no errors.
    """
    num_of_errors = len(file_results["errors"])
    if num_of_errors == 0:
        # No errors: return early and skip reading the error-levels file
        # entirely (the original opened it unconditionally).
        return 0, "", "", "", ""
    commands_of_run: List[Dict[str, Any]] = file_results.get("commands", [])
    # Guard against an empty command list (the original raised IndexError);
    # an empty dict routes us to the file_results["errors"] fallback below.
    run_command_error: Dict[str, Any] = commands_of_run[-1] if commands_of_run else {}
    error_str: int = len(run_command_error.get("error", ""))
    if error_str > 1:
        error_type = run_command_error["error"].get("errorType", "")
        error_code = run_command_error["error"].get("errorCode", "")
        try:
            # Instrument Error
            error_instrument = run_command_error["error"]["errorInfo"]["node"]
        except KeyError:
            # Module Error
            error_instrument = run_command_error["error"]["errorInfo"].get("port", "")
    else:
        error_type = file_results["errors"][0]["errorType"]
        print(error_type)
        error_code = file_results["errors"][0]["errorCode"]
        error_instrument = file_results["errors"][0]["detail"]
    # Map the error code to a severity level via the error-levels CSV.
    # NOTE(review): ERROR_LEVELS_PATH is a module-level constant defined
    # elsewhere in this file (not visible in this chunk).
    # Initialize to "" so an unmatched code no longer raises
    # UnboundLocalError at the return statement.
    error_level = ""
    with open(ERROR_LEVELS_PATH, "r") as error_file:
        for error in csv.reader(error_file):
            if error[1] == error_code:
                error_level = error[4]
    return num_of_errors, error_type, error_code, error_instrument, error_level

def write_to_abr_sheet(
runs_and_robots: Dict[Any, Dict[str, Any]],
storage_directory: str,
file_name_csv: str,
google_sheet: Any,
) -> None:
"""Write dict of data to abr csv."""
sheet_location = os.path.join(storage_directory, file_name_csv)
list_of_runs = list(runs_and_robots.keys())
with open(sheet_location, "a", newline="") as f:
writer = csv.writer(f)
for run in range(len(list_of_runs)):
row = runs_and_robots[list_of_runs[run]].values()
row_list = list(row)
writer.writerow(row_list)
if google_sheet.creditals.access_token_expired:
google_sheet.gc.login()
google_sheet.update_row_index()
google_sheet.write_to_row(row_list)
t.sleep(3)

def read_abr_data_sheet(
storage_directory: str, file_name_csv: str, google_sheet: Any
) -> Set[str]:
"""Reads current run sheet to determine what new run data should be added."""
print(file_name_csv)
sheet_location = os.path.join(storage_directory, file_name_csv)
runs_in_sheet = set()
# Read the CSV file
with open(sheet_location, "r") as csv_start:
data = csv.DictReader(csv_start)
headers = data.fieldnames
if headers is not None:
for row in data:
run_id = row[headers[1]]
runs_in_sheet.add(run_id)
print(
f"There are {str(len(runs_in_sheet))} runs documented in the ABR sheet."
)
# Read Google Sheet
if google_sheet.creditals.access_token_expired:
google_sheet.gc.login()
google_sheet.write_header(headers)
google_sheet.update_row_index()
return runs_in_sheet
def write_to_abr_sheet(
    runs_and_robots: Dict[Any, Dict[str, Any]],
    storage_directory: str,
    file_name_csv: str,
    google_sheet: Any,
) -> None:
    """Write dict of data to abr csv.

    Appends one CSV row per run (the values of each per-run dict, in
    insertion order) and mirrors the same row to the Google Sheet,
    pausing 3 s between sheet writes (presumably for rate limiting —
    confirm against the Google Sheets quota).
    """
    sheet_location = os.path.join(storage_directory, file_name_csv)
    with open(sheet_location, "a", newline="") as f:
        writer = csv.writer(f)
        # Iterate the dict values directly instead of indexing a key list.
        for run_data in runs_and_robots.values():
            row_list = list(run_data.values())
            writer.writerow(row_list)
            # NOTE(review): "creditals" looks like a typo for "credentials",
            # but it must match the attribute name on the google_sheet
            # wrapper object — confirm against that class before renaming.
            if google_sheet.creditals.access_token_expired:
                google_sheet.gc.login()
            google_sheet.update_row_index()
            google_sheet.write_to_row(row_list)
            t.sleep(3)

def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
"""Read all files in storage directory, extracts run id, adds to set."""
os.makedirs(storage_directory, exist_ok=True)
list_of_files = os.listdir(storage_directory)
run_ids = set()
for this_file in list_of_files:
read_file = os.path.join(storage_directory, this_file)
if read_file.endswith(".json"):
file_results = json.load(open(read_file))
run_id = file_results.get("run_id", "")
if len(run_id) > 0:
run_ids.add(run_id)
return run_ids

def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
"""Subtracts runs from storage from current runs being read."""
runs_to_save = runs - runs_from_storage
print(f"There are {str(len(runs_to_save))} new run(s) to save.")
return runs_to_save
def read_abr_data_sheet(
    storage_directory: str, file_name_csv: str, google_sheet: Any
) -> Set[str]:
    """Reads current run sheet to determine what new run data should be added.

    Returns the set of run IDs already recorded in the local CSV (the run
    ID is assumed to live in the second CSV column — confirm against the
    sheet headers). Also pushes the header row to the Google Sheet and
    refreshes its row index.
    """
    print(file_name_csv)
    sheet_location = os.path.join(storage_directory, file_name_csv)
    runs_in_sheet = set()
    # Read the CSV file
    with open(sheet_location, "r") as csv_start:
        data = csv.DictReader(csv_start)
        headers = data.fieldnames
        if headers is not None:
            for row in data:
                run_id = row[headers[1]]
                runs_in_sheet.add(run_id)
    print(f"There are {str(len(runs_in_sheet))} runs documented in the ABR sheet.")
    # Read Google Sheet
    # NOTE(review): "creditals" looks like a typo for "credentials", but it
    # must match the google_sheet wrapper's attribute — confirm before renaming.
    if google_sheet.creditals.access_token_expired:
        google_sheet.gc.login()
    if headers is not None:
        # Guard: a brand-new/empty CSV has no header row to push
        # (the original passed None to write_header in that case).
        google_sheet.write_header(headers)
    google_sheet.update_row_index()
    return runs_in_sheet


def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
    """Read all files in storage directory, extracts run id, adds to set.

    Only ``*.json`` files are inspected; files without a non-empty
    ``"run_id"`` key are ignored. The storage directory is created if it
    does not yet exist.
    """
    os.makedirs(storage_directory, exist_ok=True)
    run_ids: Set[str] = set()
    for this_file in os.listdir(storage_directory):
        read_file = os.path.join(storage_directory, this_file)
        if read_file.endswith(".json"):
            # Use a context manager so the handle is closed promptly
            # (the original json.load(open(...)) leaked it).
            with open(read_file) as json_file:
                file_results = json.load(json_file)
            run_id = file_results.get("run_id", "")
            if len(run_id) > 0:
                run_ids.add(run_id)
    return run_ids


def get_unseen_run_ids(runs: Set[str], runs_from_storage: Set[str]) -> Set[str]:
    """Subtracts runs from storage from current runs being read."""
    runs_to_save = runs.difference(runs_from_storage)
    print(f"There are {str(len(runs_to_save))} new run(s) to save.")
    return runs_to_save

0 comments on commit 8b11175

Please sign in to comment.