From 0c799fec1ab8df32918633ccf015c396ca18ab8d Mon Sep 17 00:00:00 2001
From: Rhyann Clarke <146747548+rclarke0@users.noreply.github.com>
Date: Tue, 9 Apr 2024 13:09:41 -0400
Subject: [PATCH] Add errored runs to abr tracking sheet (#14845)

# Overview

Improves ABR error data collection: errored runs are now recorded on the ABR tracking sheet with a linked JIRA ticket, and supporting logs are gathered automatically.

# Test Plan

Tested the code on multiple robots.

# Changelog

- Added a function to download robot logs.
- Added code to move error documents (run log, calibration log, robot logs) into a folder named after the JIRA ticket.
- Added the errored run to the ABR sheet with a link to its JIRA ticket.
- Added extra reads to abr_scale so the scale is read more often.
- Edited the ABR calibration script to ensure duplicate calibrations are not added.

# Review requests

Is 5000 lines of log recording enough to capture a robot error if the script is run immediately?
Is there any manipulation of the robot logs that could be done to make error analysis more efficient?

Minimal sketches of the new mechanisms (log download, error-folder collection, datetime handling, and duplicate filtering) follow for reviewers.

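On the 5000-line question: `records` is just a query parameter on the robot's `/logs` endpoint, so the window can be widened if errors scroll out of it. A minimal sketch of the retrieval step in isolation, under the assumption of a reachable robot; `ROBOT_IP` and `STORAGE_DIR` are placeholders, and the port, endpoint, and headers follow the `get_logs` helper in this diff:

```python
import os

import requests

ROBOT_IP = "10.14.19.1"  # placeholder robot address
STORAGE_DIR = "."  # placeholder storage directory


def download_log(log_type: str, records: int = 5000) -> str:
    """Fetch one robot log over HTTP, save it locally, and return the file path."""
    response = requests.get(
        f"http://{ROBOT_IP}:31950/logs/{log_type}",
        headers={"log_identifier": log_type},
        params={"records": records},  # raise this if 5000 lines proves too few
        timeout=30,
    )
    response.raise_for_status()
    file_path = os.path.join(STORAGE_DIR, f"{ROBOT_IP}_{log_type.split('.')[0]}.log")
    with open(file_path, mode="w", encoding="utf-8") as file:
        file.write(response.text)
    return file_path


if __name__ == "__main__":
    print(download_log("api.log"))
```
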
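The error-document collection reduces to creating a folder named after the new ticket key and `shutil.move`-ing each artifact into it. A sketch under the assumption that the ticket key and artifact paths are already in hand; `collect_error_files` is a hypothetical helper, not part of this diff:

```python
import os
import shutil
from typing import List


def collect_error_files(storage_dir: str, issue_key: str, files: List[str]) -> str:
    """Move error artifacts into a folder named after the JIRA ticket."""
    error_folder = os.path.join(storage_dir, issue_key)
    os.makedirs(error_folder, exist_ok=True)
    for source in files:
        # Keep the original file name; only the parent folder changes.
        shutil.move(source, os.path.join(error_folder, os.path.basename(source)))
    return error_folder


# Hypothetical usage with placeholder paths and ticket key:
# collect_error_files("/data", "RABR-123", ["run_log.json", "calibration.json"])
```
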
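The `write_to_row` change exists because a row containing `datetime` objects cannot be serialized for the Sheets API; dates are stringified before insertion. The conversion on its own, with sample data:

```python
from datetime import datetime
from typing import Any, List


def sanitize_row(data: List[Any]) -> List[Any]:
    """Render datetime cells as strings so the row can be serialized by gspread."""
    return [
        item.strftime("%Y/%m/%d %H:%M:%S") if isinstance(item, datetime) else item
        for item in data
    ]


print(sanitize_row(["ABR1", datetime.now(), 12.5]))
```
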
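The duplicate-calibration guard compares each candidate row's serial and last-modified date against the columns already on the sheet and skips the write on a match. The comparison reduced to plain lists (sample values are illustrative only):

```python
from typing import List


def is_duplicate(
    serial: str, modified: str, serials: List[str], dates: List[str]
) -> bool:
    """True if this serial/modified-date pair already has a row on the sheet."""
    return any(serial == s and modified == d for s, d in zip(serials, dates))


existing_serials = ["P1KSV33"]
existing_dates = ["2024/04/08 09:15:00"]
print(is_duplicate("P1KSV33", "2024/04/08 09:15:00", existing_serials, existing_dates))
```
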
# Risk assessment

---
 .../automation/google_drive_tool.py           |  1 -
 .../automation/google_sheets_tool.py          |  7 ++
 .../abr_testing/automation/jira_tool.py       | 11 +--
 .../data_collection/abr_calibration_logs.py   | 32 ++++++---
 .../data_collection/abr_google_drive.py       | 26 +++++--
 .../abr_testing/data_collection/abr_lpc.py    |  1 +
 .../data_collection/abr_robot_error.py        | 67 ++++++++++++++++---
 .../data_collection/read_robot_logs.py        | 63 +++++++++++++++--
 abr-testing/abr_testing/tools/abr_scale.py    |  7 ++
 9 files changed, 179 insertions(+), 36 deletions(-)
 create mode 100644 abr-testing/abr_testing/data_collection/abr_lpc.py

diff --git a/abr-testing/abr_testing/automation/google_drive_tool.py b/abr-testing/abr_testing/automation/google_drive_tool.py
index 8b56d0390fe..3b65456d0ff 100644
--- a/abr-testing/abr_testing/automation/google_drive_tool.py
+++ b/abr-testing/abr_testing/automation/google_drive_tool.py
@@ -25,7 +25,6 @@ def __init__(self, credentials: Any, folder_name: str, email: str) -> None:
         self.drive_service = build("drive", "v3", credentials=self.credentials)
         self.parent_folder = folder_name
         self.email = email
-        self.folder = self.open_folder()
 
     def list_folder(self, delete: Any = False) -> Set[str]:
         """List folders and files in Google Drive."""
diff --git a/abr-testing/abr_testing/automation/google_sheets_tool.py b/abr-testing/abr_testing/automation/google_sheets_tool.py
index e486a28fed2..af38a39dcc0 100644
--- a/abr-testing/abr_testing/automation/google_sheets_tool.py
+++ b/abr-testing/abr_testing/automation/google_sheets_tool.py
@@ -2,6 +2,7 @@
 import gspread  # type: ignore[import]
 import socket
 import httplib2
+from datetime import datetime
 from oauth2client.service_account import ServiceAccountCredentials  # type: ignore[import]
 from typing import Dict, List, Any, Set, Tuple
 
@@ -57,6 +58,12 @@ def write_to_row(self, data: List) -> None:
         """Write data into a row in a List[] format."""
         try:
             self.row_index += 1
+            data = [
+                item.strftime("%Y/%m/%d %H:%M:%S")
+                if isinstance(item, datetime)
+                else item
+                for item in data
+            ]
             self.worksheet.insert_row(data, index=self.row_index)
         except socket.gaierror:
             pass
diff --git a/abr-testing/abr_testing/automation/jira_tool.py b/abr-testing/abr_testing/automation/jira_tool.py
index aff3a6798c3..5c0a2556dfb 100644
--- a/abr-testing/abr_testing/automation/jira_tool.py
+++ b/abr-testing/abr_testing/automation/jira_tool.py
@@ -5,7 +5,7 @@
 import json
 import webbrowser
 import argparse
-from typing import List, Tuple
+from typing import List
 
 
 class JiraTicket:
@@ -41,11 +41,12 @@ def issues_on_board(self, board_id: str) -> List[str]:
             issue_ids.append(issue_id)
         return issue_ids
 
-    def open_issue(self, issue_key: str) -> None:
+    def open_issue(self, issue_key: str) -> str:
         """Open issue on web browser."""
        url = f"{self.url}/browse/{issue_key}"
         print(f"Opening at {url}.")
         webbrowser.open(url)
+        return url
 
     def create_ticket(
         self,
@@ -58,7 +59,7 @@ def create_ticket(
         components: list,
         affects_versions: str,
         robot: str,
-    ) -> Tuple[str, str]:
+    ) -> str:
         """Create ticket."""
         data = {
             "fields": {
@@ -94,13 +95,15 @@ def create_ticket(
             response_str = str(response.content)
             issue_url = response.json().get("self")
             issue_key = response.json().get("key")
+            print(f"issue key: {issue_key}")
+            print(f"issue url: {issue_url}")
             if issue_key is None:
                 print("Error: Could not create issue. No key returned.")
         except requests.exceptions.HTTPError:
             print(f"HTTP error occurred. Response content: {response_str}")
         except json.JSONDecodeError:
             print(f"JSON decoding error occurred. Response content: {response_str}")
-        return issue_url, issue_key
+        return issue_key
 
     def post_attachment_to_ticket(self, issue_id: str, attachment_path: str) -> None:
         """Adds attachments to ticket."""
diff --git a/abr-testing/abr_testing/data_collection/abr_calibration_logs.py b/abr-testing/abr_testing/data_collection/abr_calibration_logs.py
index 6e897dd78eb..4d744b5b2f5 100644
--- a/abr-testing/abr_testing/data_collection/abr_calibration_logs.py
+++ b/abr-testing/abr_testing/data_collection/abr_calibration_logs.py
@@ -1,5 +1,5 @@
 """Get Calibration logs from robots."""
-from typing import Dict, Any, List
+from typing import Dict, Any, List, Union
 import argparse
 import os
 import json
@@ -16,15 +16,18 @@ def check_for_duplicates(
     col_2: int,
     row: List[str],
     headers: List[str],
-) -> List[str]:
+) -> Union[List[str], None]:
     """Check google sheet for duplicates."""
     serials = google_sheet.get_column(col_1)
     modify_dates = google_sheet.get_column(col_2)
-    for serial, modify_date in zip(serials, modify_dates):
-        if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
-            print(f"Skipped row{row}. Already on Google Sheet.")
-            continue
-    read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
+    # Check for a complete calibration.
+    if len(row[-1]) > 0:
+        for serial, modify_date in zip(serials, modify_dates):
+            if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date:
+                print(f"Skipped row for instrument {serial}. Already on Google Sheet.")
+                return None
+        read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers)
+        print(f"Writing calibration for: {row[col_1 - 1]}")
     return row
 
 
@@ -64,6 +67,7 @@ def upload_calibration_offsets(
         instrument_row,
         instrument_headers,
     )
+
     # MODULE SHEET
     if len(calibration.get("Modules", "")) > 0:
         module_headers = (
@@ -198,13 +202,19 @@ def upload_calibration_offsets(
     except FileNotFoundError:
         print(f"Add .json file with robot IPs to: {storage_directory}.")
         sys.exit()
+
     if ip_or_all == "ALL":
         ip_address_list = ip_file["ip_address_list"]
         for ip in ip_address_list:
-            saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
-                ip, storage_directory
-            )
-            upload_calibration_offsets(calibration, storage_directory)
+            print(ip)
+            try:
+                saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
+                    ip, storage_directory
+                )
+                upload_calibration_offsets(calibration, storage_directory)
+            except Exception:
+                print(f"ERROR: Failed to read calibration data from {ip}.")
+                continue
     else:
         saved_file_path, calibration = read_robot_logs.get_calibration_offsets(
             ip_or_all, storage_directory
diff --git a/abr-testing/abr_testing/data_collection/abr_google_drive.py b/abr-testing/abr_testing/data_collection/abr_google_drive.py
index 741ac871d62..6470f1e0410 100644
--- a/abr-testing/abr_testing/data_collection/abr_google_drive.py
+++ b/abr-testing/abr_testing/data_collection/abr_google_drive.py
@@ -6,7 +6,7 @@
 import gspread  # type: ignore[import]
 from datetime import datetime, timedelta
 from abr_testing.data_collection import read_robot_logs
-from typing import Set, Dict, Any, Tuple, List
+from typing import Set, Dict, Any, Tuple, List, Union
 from abr_testing.automation import google_drive_tool, google_sheets_tool
 
 
@@ -30,7 +30,9 @@ def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
 
 
 def create_data_dictionary(
-    runs_to_save: Set[str], storage_directory: str
+    runs_to_save: Union[Set[str], str],
+    storage_directory: str,
+    issue_url: str,
 ) -> Tuple[Dict[Any, Dict[str, Any]], List]:
     """Pull data from run files and format into a dictionary."""
     runs_and_robots = {}
@@ -41,7 +43,7 @@ def create_data_dictionary(
                 file_results = json.load(file)
         else:
            continue
-        run_id = file_results.get("run_id")
+        run_id = file_results.get("run_id", "NaN")
         if run_id in runs_to_save:
             robot = file_results.get("robot_name")
             protocol_name = file_results["protocol"]["metadata"].get("protocolName", "")
@@ -56,6 +58,7 @@ def create_data_dictionary(
                 error_instrument,
                 error_level,
             ) = read_robot_logs.get_error_info(file_results)
+
             all_modules = get_modules(file_results)
 
             start_time_str, complete_time_str, start_date, run_time_min = (
@@ -103,13 +106,14 @@ def create_data_dictionary(
                 tc_dict = read_robot_logs.thermocycler_commands(file_results)
                 hs_dict = read_robot_logs.hs_commands(file_results)
                 tm_dict = read_robot_logs.temperature_module_commands(file_results)
-                notes = {"Note1": "", "Note2": ""}
+                notes = {"Note1": "", "Jira Link": issue_url}
                 row_2 = {**row, **all_modules, **notes, **hs_dict, **tm_dict, **tc_dict}
                 headers = list(row_2.keys())
                 runs_and_robots[run_id] = row_2
             else:
-                os.remove(file_path)
-                print(f"Run ID: {run_id} has a run time of 0 minutes. Run removed.")
+                continue
+                # os.remove(file_path)
+                # print(f"Run ID: {run_id} has a run time of 0 minutes. Run removed.")
     return runs_and_robots, headers
 
 
@@ -168,6 +172,14 @@ def create_data_dictionary(
     except gspread.exceptions.APIError:
         print("ERROR: Check google sheet name. Check credentials file.")
         sys.exit()
+    try:
+        google_sheet_lpc = google_sheets_tool.google_sheet(
+            credentials_path, "ABR-LPC", 0
+        )
+        print("Connected to google sheet: ABR-LPC")
+    except gspread.exceptions.APIError:
+        print("ERROR: Check google sheet name. Check credentials file.")
+        sys.exit()
     run_ids_on_gs = google_sheet.get_column(2)
     run_ids_on_gs = set(run_ids_on_gs)
 
@@ -181,7 +193,7 @@ def create_data_dictionary(
     )
     # Add missing runs to google sheet
     runs_and_robots, headers = create_data_dictionary(
-        missing_runs_from_gs, storage_directory
+        missing_runs_from_gs, storage_directory, ""
     )
     read_robot_logs.write_to_local_and_google_sheet(
         runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
diff --git a/abr-testing/abr_testing/data_collection/abr_lpc.py b/abr-testing/abr_testing/data_collection/abr_lpc.py
new file mode 100644
index 00000000000..dd880d09c37
--- /dev/null
+++ b/abr-testing/abr_testing/data_collection/abr_lpc.py
@@ -0,0 +1 @@
+"""Get Unique LPC Values from Run logs."""
diff --git a/abr-testing/abr_testing/data_collection/abr_robot_error.py b/abr-testing/abr_testing/data_collection/abr_robot_error.py
index 3f7302e8725..b139b5a3ade 100644
--- a/abr-testing/abr_testing/data_collection/abr_robot_error.py
+++ b/abr-testing/abr_testing/data_collection/abr_robot_error.py
@@ -3,7 +3,13 @@
 from abr_testing.data_collection import read_robot_logs, abr_google_drive, get_run_logs
 import requests
 import argparse
-from abr_testing.automation import jira_tool
+from abr_testing.automation import jira_tool, google_sheets_tool, google_drive_tool
+import shutil
+import os
+import subprocess
+import json
+import sys
+import gspread  # type: ignore[import]
 
 
 def get_error_runs_from_robot(ip: str) -> List[str]:
@@ -44,7 +50,6 @@ def get_error_info_from_robot(
     # JIRA Ticket Fields
     failure_level = "Level " + str(error_level) + " Failure"
     components = [failure_level, "Flex-RABR"]
-    components = ["Flex-RABR"]
     affects_version = results["API_Version"]
     parent = results.get("robot_name", "")
     print(parent)
@@ -140,18 +145,19 @@ def get_error_info_from_robot(
         affects_version,
         components,
         whole_description_str,
-        saved_file_path,
+        run_log_file_path,
     ) = get_error_info_from_robot(ip, one_run, storage_directory)
     # get calibration data
     saved_file_path_calibration, calibration = read_robot_logs.get_calibration_offsets(
         ip, storage_directory
     )
+    file_paths = read_robot_logs.get_logs(storage_directory, ip)
     print(f"Making ticket for run: {one_run} on robot {robot}.")
     # TODO: make argument or see if I can get rid of with using board_id.
     project_key = "RABR"
     parent_key = project_key + "-" + robot[-1]
-    issues_ids = ticket.issues_on_board(board_id)
-    issue_url, issue_key = ticket.create_ticket(
+    # CREATE TICKET
+    issue_key = ticket.create_ticket(
         summary,
         whole_description_str,
         project_key,
@@ -162,6 +168,51 @@ def get_error_info_from_robot(
         affects_version,
         parent_key,
     )
-    ticket.open_issue(issue_key)
-    ticket.post_attachment_to_ticket(issue_key, saved_file_path)
-    ticket.post_attachment_to_ticket(issue_key, saved_file_path_calibration)
+    # OPEN TICKET
+    issue_url = ticket.open_issue(issue_key)
+    # MOVE FILES TO ERROR FOLDER.
+    error_files = [saved_file_path_calibration, run_log_file_path] + file_paths
+    error_folder_path = os.path.join(storage_directory, str(issue_key))
+    os.makedirs(error_folder_path, exist_ok=True)
+    for source_file in error_files:
+        destination_file = os.path.join(
+            error_folder_path, os.path.basename(source_file)
+        )
+        shutil.move(source_file, destination_file)
+    # OPEN FOLDER DIRECTORY
+    subprocess.Popen(["explorer", error_folder_path])
+    # CONNECT TO GOOGLE DRIVE
+    credentials_path = os.path.join(storage_directory, "credentials.json")
+    google_sheet_name = "ABR-run-data"
+    try:
+        google_drive = google_drive_tool.google_drive(
+            credentials_path,
+            "1Cvej0eadFOTZr9ILRXJ0Wg65ymOtxL4m",
+            "rhyann.clarke@opentrons.com",
+        )
+        print("Connected to google drive.")
+    except json.decoder.JSONDecodeError:
+        print(
+            "Credential file is damaged. Get from https://console.cloud.google.com/apis/credentials"
+        )
+        sys.exit()
+    # CONNECT TO GOOGLE SHEET
+    try:
+        google_sheet = google_sheets_tool.google_sheet(
+            credentials_path, google_sheet_name, 0
+        )
+        print(f"Connected to google sheet: {google_sheet_name}")
+    except gspread.exceptions.APIError:
+        print("ERROR: Check google sheet name. Check credentials file.")
+        sys.exit()
+    # WRITE ERRORED RUN TO GOOGLE SHEET
+    error_run_log = os.path.join(error_folder_path, os.path.basename(run_log_file_path))
+    google_drive.upload_file(error_run_log)
+    run_id = os.path.basename(error_run_log).split("_")[1].split(".")[0]
+    runs_and_robots, headers = abr_google_drive.create_data_dictionary(
+        run_id, error_folder_path, issue_url
+    )
+    read_robot_logs.write_to_local_and_google_sheet(
+        runs_and_robots, storage_directory, google_sheet_name, google_sheet, headers
+    )
+    print("Wrote run to ABR-run-data")
diff --git a/abr-testing/abr_testing/data_collection/read_robot_logs.py b/abr-testing/abr_testing/data_collection/read_robot_logs.py
index 0e31603b7da..48ef1d20163 100644
--- a/abr-testing/abr_testing/data_collection/read_robot_logs.py
+++ b/abr-testing/abr_testing/data_collection/read_robot_logs.py
@@ -14,6 +14,35 @@
 import requests
 
 
+def lpc_data(file_results: Dict[str, Any], protocol_info: Dict) -> List[Dict[str, Any]]:
+    """Get labware offsets from one run log."""
+    offsets = file_results.get("labwareOffsets", "")
+    all_offsets: List[Dict[str, Any]] = []
+    if len(offsets) > 0:
+        for offset in offsets:
+            labware_type = offset.get("definitionUri", "")
+            slot = offset["location"].get("slotName", "")
+            module_location = offset["location"].get("moduleModel", "")
+            adapter = offset["location"].get("definitionUri", "")
+            x_offset = offset["vector"].get("x", 0.0)
+            y_offset = offset["vector"].get("y", 0.0)
+            z_offset = offset["vector"].get("z", 0.0)
+            created_at = offset.get("createdAt", "")
+            row = {
+                "createdAt": created_at,
+                "Labware Type": labware_type,
+                "Slot": slot,
+                "Module": module_location,
+                "Adapter": adapter,
+                "X": x_offset,
+                "Y": y_offset,
+                "Z": z_offset,
+            }
+            row2 = {**protocol_info, **row}
+            all_offsets.append(row2)
+    return all_offsets
+
+
 def command_time(command: Dict[str, str]) -> Tuple[float, float]:
     """Calculate total create and complete time per command."""
     try:
@@ -82,11 +111,11 @@ def hs_commands(file_results: Dict[str, Any]) -> Dict[str, float]:
                 temp_time = datetime.strptime(
                     command.get("completedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
                 )
-
+    hs_latch_sets = hs_latch_count / 2  # one set of open/close
     hs_total_rotations = sum(hs_rotations.values())
     hs_total_temp_time = sum(hs_temps.values())
     hs_dict = {
-        "Heatershaker # of Latch Engagements": hs_latch_count,
+        "Heatershaker # of Latch Open/Close": hs_latch_sets,
         "Heatershaker # of Homes": hs_home_count,
         "Heatershaker # of Rotations": hs_total_rotations,
         "Heatershaker Temp On Time (sec)": hs_total_temp_time,
@@ -206,9 +235,9 @@ def thermocycler_commands(file_results: Dict[str, Any]) -> Dict[str, float]:
 
     block_total_time = sum(block_temps.values())
     lid_total_time = sum(lid_temps.values())
-
+    lid_sets = lid_engagements / 2
     tc_dict = {
-        "Thermocycler # of Lid Engagements": lid_engagements,
+        "Thermocycler # of Lid Open/Close": lid_sets,
         "Thermocycler Block # of Temp Changes": block_temp_changes,
         "Thermocycler Block Temp On Time (sec)": block_total_time,
         "Thermocycler Lid # of Temp Changes": lid_temp_changes,
@@ -223,7 +252,6 @@ def create_abr_data_sheet(
 ) -> str:
     """Creates csv file to log ABR data."""
     file_name_csv = file_name + ".csv"
-    print(file_name_csv)
     sheet_location = os.path.join(storage_directory, file_name_csv)
     if os.path.exists(sheet_location):
         print(f"File {sheet_location} located. Not overwriting.")
@@ -427,3 +455,28 @@ def get_calibration_offsets(
     saved_file_path = os.path.join(storage_directory, save_name)
     json.dump(calibration, open(saved_file_path, mode="w"))
     return saved_file_path, calibration
+
+
+def get_logs(storage_directory: str, ip: str) -> List[str]:
+    """Get Robot logs."""
+    log_types = ["api.log", "server.log", "serial.log", "touchscreen.log"]
+    all_paths = []
+    for log_type in log_types:
+        try:
+            response = requests.get(
+                f"http://{ip}:31950/logs/{log_type}",
+                headers={"log_identifier": log_type},
+                params={"records": 5000},
+            )
+            response.raise_for_status()
+            log_data = response.text
+            log_name = ip + "_" + log_type.split(".")[0] + ".log"
+            file_path = os.path.join(storage_directory, log_name)
+            with open(file_path, mode="w", encoding="utf-8") as file:
+                # Save the raw log text for later analysis.
+                file.write(log_data)
+        except requests.exceptions.RequestException:
+            print(f"Request exception. Did not save {log_type}")
+            continue
+        all_paths.append(file_path)
+    return all_paths
diff --git a/abr-testing/abr_testing/tools/abr_scale.py b/abr-testing/abr_testing/tools/abr_scale.py
index 0947091fe4b..75c887d4ecc 100644
--- a/abr-testing/abr_testing/tools/abr_scale.py
+++ b/abr-testing/abr_testing/tools/abr_scale.py
@@ -73,8 +73,12 @@
         print("No google sheets credentials. Add credentials to storage notebook.")
 
     # Scale Loop
+    grams, is_stable = scale.read_mass()
+    grams, is_stable = scale.read_mass()
+    is_stable = False
     break_all = False
     while is_stable is False:
+        grams, is_stable = scale.read_mass()
         grams, is_stable = scale.read_mass()
         print(f"Scale reading: grams={grams}, is_stable={is_stable}")
         time_now = datetime.datetime.now()
@@ -90,9 +94,12 @@
             y_or_no = input("Do you want to weigh another sample? (Y/N): ")
             if y_or_no == "Y":
                 # Uses same storage directory and file.
+                grams, is_stable = scale.read_mass()
+                is_stable = False
                 robot = input("Robot: ")
                 labware = input("Labware: ")
                 protocol_step = input("Measurement Step (1,2,3): ")
+                grams, is_stable = scale.read_mass()
             elif y_or_no == "N":
                 break_all = True
         if break_all: