Skip to content

Commit

Permalink
adding ip address and google sheet path as arguments (#14676)
Browse files Browse the repository at this point in the history
<!--
Thanks for taking the time to open a pull request! Please make sure
you've read the "Opening Pull Requests" section of our Contributing
Guide:


https://github.com/Opentrons/opentrons/blob/edge/CONTRIBUTING.md#opening-pull-requests

To ensure your code is reviewed quickly and thoroughly, please fill out
the sections below to the best of your ability!
-->

# Overview

Added the Google Sheet path as an argument, and the IP address list is now
imported from the storage_directory.

# Test Plan

Tested with different Google Sheets to ensure that the new names work.

# Changelog

**abr_run_logs.py**
In the `get_all_run_logs` function, added:
` try:
        sys.path.insert(0, storage_directory)
        import IPs  # type: ignore[import]

        ip_address_list = IPs.ip_address_list
    except ImportError:
raise ImportError("Make sure Ip address file is saved in storage
directory.")`
**abr_read_logs.py**
Changed the credentials file to be called credentials.json instead of
abr.json.
Added the Google Sheet path and tab number as arguments.
`parser.add_argument(
        "file_name",
        metavar="FILE_NAME",
        type=str,
        nargs=1,
        help="Name of google sheet and local csv to save data to.",
    )
    parser.add_argument(
        "google_sheet_tab_number",
        metavar="GOOGLE_SHEET_TAB_NUMBER",
        type=int,
        nargs=1,
        help="Google sheet tab number.",
    )
    args = parser.parse_args()
    storage_directory = args.storage_directory[0]
    file_name = args.file_name[0]
    tab_number = args.google_sheet_tab_number[0]`

Changed the local file name to be the same as the Google Sheet name.
`def create_abr_data_sheet(storage_directory: str, file_name: str) ->
str:
    """Creates csv file to log ABR data."""
    file_name_csv = file_name + ".csv"
    sheet_location = os.path.join(storage_directory, file_name_csv)`

Changed the IP address file to be read as .json.
# Review requests

Determine whether there are any other lines in this code that make it too
ABR-specific.

# Risk assessment

<!--
Carefully go over your pull request and look at the other parts of the
codebase it may affect. Look for the possibility, even if you think it's
small, that your change may affect some other part of the system - for
instance, changing return tip behavior in protocol may also change the
behavior of labware calibration.

Identify the other parts of the system your codebase may affect, so that
in addition to your own review and testing, other people who may not
have the system internalized as much as you can focus their attention
and testing there.
-->
  • Loading branch information
rclarke0 authored Mar 15, 2024
1 parent 574f793 commit 6695f9e
Show file tree
Hide file tree
Showing 3 changed files with 41 additions and 29 deletions.
47 changes: 35 additions & 12 deletions hardware-testing/hardware_testing/abr_tools/abr_read_logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,9 +68,10 @@ def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, st
return num_of_errors, error_type, error_code, error_instrument, error_level


def create_abr_data_sheet(storage_directory: str) -> None:
def create_abr_data_sheet(storage_directory: str, file_name: str) -> str:
"""Creates csv file to log ABR data."""
sheet_location = os.path.join(storage_directory, "ABR-run-data.csv")
file_name_csv = file_name + ".csv"
sheet_location = os.path.join(storage_directory, file_name_csv)
if os.path.exists(sheet_location):
print(f"File {sheet_location} located. Not overwriting.")
else:
Expand Down Expand Up @@ -100,6 +101,7 @@ def create_abr_data_sheet(storage_directory: str) -> None:
writer = csv.DictWriter(csvfile, fieldnames=headers)
writer.writeheader()
print(f"Created file. Located: {sheet_location}.")
return file_name_csv


def create_data_dictionary(
Expand Down Expand Up @@ -181,9 +183,9 @@ def create_data_dictionary(
return runs_and_robots


def read_abr_data_sheet(storage_directory: str) -> Set[str]:
def read_abr_data_sheet(storage_directory: str, file_name_csv: str) -> Set[str]:
"""Reads current run sheet to determine what new run data should be added."""
sheet_location = os.path.join(storage_directory, "ABR-run-data.csv")
sheet_location = os.path.join(storage_directory, file_name_csv)
runs_in_sheet = set()
# Read the CSV file
with open(sheet_location, "r") as csv_start:
Expand All @@ -201,10 +203,12 @@ def read_abr_data_sheet(storage_directory: str) -> Set[str]:


def write_to_abr_sheet(
runs_and_robots: Dict[Any, Dict[str, Any]], storage_directory: str
runs_and_robots: Dict[Any, Dict[str, Any]],
storage_directory: str,
file_name_csv: str,
) -> None:
"""Write dict of data to abr csv."""
sheet_location = os.path.join(storage_directory, "ABR-run-data.csv")
sheet_location = os.path.join(storage_directory, file_name_csv)
list_of_runs = list(runs_and_robots.keys())
with open(sheet_location, "a", newline="") as f:
writer = csv.writer(f)
Expand All @@ -226,25 +230,44 @@ def write_to_abr_sheet(
nargs=1,
help="Path to long term storage directory for run logs.",
)
parser.add_argument(
"file_name",
metavar="FILE_NAME",
type=str,
nargs=1,
help="Name of google sheet and local csv to save data to.",
)
parser.add_argument(
"google_sheet_tab_number",
metavar="GOOGLE_SHEET_TAB_NUMBER",
type=int,
nargs=1,
help="Google sheet tab number.",
)
args = parser.parse_args()
storage_directory = args.storage_directory[0]
file_name = args.file_name[0]
tab_number = args.google_sheet_tab_number[0]
try:
sys.path.insert(0, storage_directory)
import google_sheets_tool # type: ignore[import]

credentials_path = os.path.join(storage_directory, "abr.json")
credentials_path = os.path.join(storage_directory, "credentials.json")
except ImportError:
raise ImportError("Make sure google_sheets_tool.py is in storage directory.")
raise ImportError(
"Check for google_sheets_tool.py and credentials.json in storage directory."
)
try:
google_sheet = google_sheets_tool.google_sheet(
credentials_path, "ABR Run Data", tab_number=0
credentials_path, file_name, tab_number=tab_number
)
print("Connected to google sheet.")
except FileNotFoundError:
print("No google sheets credentials. Add credentials to storage notebook.")

runs_from_storage = get_run_ids_from_storage(storage_directory)
create_abr_data_sheet(storage_directory)
runs_in_sheet = read_abr_data_sheet(storage_directory)
file_name_csv = create_abr_data_sheet(storage_directory, file_name)
runs_in_sheet = read_abr_data_sheet(storage_directory, file_name_csv)
runs_to_save = get_unseen_run_ids(runs_from_storage, runs_in_sheet)
runs_and_robots = create_data_dictionary(runs_to_save, storage_directory)
write_to_abr_sheet(runs_and_robots, storage_directory)
write_to_abr_sheet(runs_and_robots, storage_directory, file_name_csv)
15 changes: 0 additions & 15 deletions hardware-testing/hardware_testing/abr_tools/abr_robots.py

This file was deleted.

8 changes: 6 additions & 2 deletions hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
"""ABR Run Log Pull."""
from .abr_robots import ABR_IPS
from typing import Set, Dict, Any
import argparse
import os
Expand Down Expand Up @@ -112,8 +111,13 @@ def get_all_run_logs(storage_directory: str) -> None:
Read each robot's list of unique run log IDs and compare them to all IDs in storage.
Any ID that is not in storage, download the run log and put it in storage.
"""
ip_json_file = os.path.join(storage_directory, "IPs.json")
ip_file = json.load(open(ip_json_file))
ip_address_list = ip_file["ip_address_list"]
print(ip_address_list)

runs_from_storage = get_run_ids_from_storage(storage_directory)
for ip in ABR_IPS:
for ip in ip_address_list:
try:
runs = get_run_ids_from_robot(ip)
runs_to_save = get_unseen_run_ids(runs, runs_from_storage)
Expand Down

0 comments on commit 6695f9e

Please sign in to comment.