Automated ABR Calibration Data Uploading (#14782)
<!--
Thanks for taking the time to open a pull request! Please make sure
you've read the "Opening Pull Requests" section of our Contributing
Guide:


https://github.com/Opentrons/opentrons/blob/edge/CONTRIBUTING.md#opening-pull-requests

To ensure your code is reviewed quickly and thoroughly, please fill out
the sections below to the best of your ability!
-->

# Overview

Pulls calibration data from ABR robots and uploads it to Google Drive and a
Google Sheet.

# Test Plan

Tested on ABR robots. Successfully pulls calibration data, uploads it to
Google Drive, and saves it to the Google Sheet.

# Changelog

- Adds `abr_calibration_logs.py`, which:

1. Connects to the Google Drive folder
2. Connects to the Google Sheet
3. Pulls module, instrument, and deck calibration data via HTTP requests and
compiles it into one .json file per robot (a minimal sketch follows the
changelog)
4. Uploads new files to the Google Drive folder
5. Adds new rows to the instrument, module, and deck calibration sheets if the
serial number and calibration last-modified timestamp pairing does not already
exist

- Splits `jira_tool.py` into a file containing only the JIRA tools and a file
that uses those tools with the robots.
- For all scripts that upload to Google Drive, changes the `folder_name`
argument to `folder_id` so that the service account writes to the correct
folder, and adds an `email` argument so the service account can share
permissions on uploaded files.
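
A minimal sketch of step 3 above (pulling calibration data over HTTP and
compiling one .json file per robot). The endpoint paths, port, and helper name
are assumptions modeled on the `requests.get(f"http://{ip}:31950/runs", ...)`
pattern visible elsewhere in this PR, not a copy of `abr_calibration_logs.py`:

```python
import json
import os

import requests


def save_calibration_snapshot(ip: str, storage_directory: str) -> str:
    """Pull instrument, module, and deck calibration data from one robot
    and compile it into a single .json file (hypothetical helper)."""
    headers = {"opentrons-version": "3"}
    snapshot = {}
    # Assumed endpoint paths; the actual script may query different routes.
    for key, path in (
        ("instruments", "/instruments"),
        ("modules", "/modules"),
        ("deck_calibration", "/calibration/status"),
    ):
        response = requests.get(f"http://{ip}:31950{path}", headers=headers)
        snapshot[key] = response.json()
    file_path = os.path.join(storage_directory, f"{ip}_calibration.json")
    with open(file_path, "w") as file:
        json.dump(snapshot, file, indent=2)
    return file_path
```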

# Review requests

<!--
Describe any requests for your reviewers here.
-->

# Risk assessment

<!--
Carefully go over your pull request and look at the other parts of the
codebase it may affect. Look for the possibility, even if you think it's
small, that your change may affect some other part of the system - for
instance, changing return tip behavior in protocol may also change the
behavior of labware calibration.

Identify the other parts of the system your codebase may affect, so that
in addition to your own review and testing, other people who may not
have the system internalized as much as you can focus their attention
and testing there.
-->
rclarke0 authored Apr 3, 2024
1 parent 6ccb243 commit 80abd2e
Showing 8 changed files with 514 additions and 165 deletions.
60 changes: 51 additions & 9 deletions abr-testing/abr_testing/automation/google_drive_tool.py
@@ -1,6 +1,8 @@
"""Google Drive Tool."""
import os
from typing import Set, Any
from typing import Set, Any, Optional
import webbrowser
import mimetypes
from oauth2client.service_account import ServiceAccountCredentials # type: ignore[import]
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
@@ -14,15 +16,16 @@
class google_drive:
"""Google Drive Tool."""

def __init__(self, credentials: Any, folder_name: str, parent_folder: Any) -> None:
def __init__(self, credentials: Any, folder_name: str, email: str) -> None:
"""Connects to google drive via credentials file."""
self.scope = ["https://www.googleapis.com/auth/drive"]
self.credentials = ServiceAccountCredentials.from_json_keyfile_name(
credentials, self.scope
)
self.drive_service = build("drive", "v3", credentials=self.credentials)
self.folder_name = folder_name
self.parent_folder = parent_folder
self.parent_folder = folder_name
self.email = email
self.folder = self.open_folder()

def list_folder(self, delete: Any = False) -> Set[str]:
"""List folders and files in Google Drive."""
@@ -72,26 +75,37 @@ def upload_file(self, file_path: str) -> str:
"""Upload file to Google Drive."""
file_metadata = {
"name": os.path.basename(file_path),
"mimeType": "application/vnd.google-apps.folder",
"parents": [self.parent_folder] if self.parent_folder else "",
"mimeType": str(mimetypes.guess_type(file_path)[0]),
"parents": [self.parent_folder],
}

media = MediaFileUpload(file_path, resumable=True)

uploaded_file = (
self.drive_service.files()
.create(body=file_metadata, media_body=media, fields="id") # type: ignore
.execute()
)

return uploaded_file["id"]

def upload_missing_files(self, storage_directory: str, missing_files: set) -> None:
def upload_missing_files(self, storage_directory: str) -> None:
"""Upload missing files to Google Drive."""
# Read Google Drive .json files.
google_drive_files = self.list_folder()
google_drive_files_json = [
file for file in google_drive_files if file.endswith(".json")
]
# Read local directory.
local_files_json = set(
file for file in os.listdir(storage_directory) if file.endswith(".json")
)
missing_files = local_files_json - set(google_drive_files_json)
print(f"Missing files: {len(missing_files)}")
# Upload missing files.
uploaded_files = []
for file in missing_files:
file_path = os.path.join(storage_directory, file)
uploaded_file_id = google_drive.upload_file(self, file_path)
self.share_permissions(uploaded_file_id)
uploaded_files.append(
{"name": os.path.basename(file_path), "id": uploaded_file_id}
)
@@ -108,3 +122,31 @@ def upload_missing_files(self, storage_directory: str) -> No
print(
f"File '{this_name}' was not found in the list of files after uploading."
)

def open_folder(self) -> Optional[str]:
"""Open folder in web browser."""
folder_metadata = (
self.drive_service.files()
.get(fileId=self.parent_folder, fields="webViewLink")
.execute()
)
folder_link = folder_metadata.get("webViewLink")
if folder_link:
print(f"Folder link: {folder_link}")
webbrowser.open(
folder_link
) # Open the folder link in the default web browser
else:
print("Folder link not found.")
return folder_link

def share_permissions(self, file_id: str) -> None:
"""Share permissions with self."""
new_permission = {
"type": "user",
"role": "writer",
"emailAddress": self.email,
}
self.drive_service.permissions().create(
fileId=file_id, body=new_permission, transferOwnership=False # type: ignore
).execute()
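
A hedged usage sketch (not part of this diff) of the updated tool, showing the
folder-ID and email arguments described in the changelog; the credentials
path, folder ID, email address, and storage directory below are placeholders:

```python
from abr_testing.automation.google_drive_tool import google_drive

drive = google_drive(
    credentials="/path/to/service_account_credentials.json",  # placeholder path
    folder_name="GOOGLE_DRIVE_FOLDER_ID",  # now expects a folder ID, not a display name
    email="your.name@example.com",  # uploaded files are shared with this address
)
# Upload any local .json files not already present in the Drive folder and
# share write permission on each with the configured email.
drive.upload_missing_files("/path/to/storage_directory")
```
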
114 changes: 0 additions & 114 deletions abr-testing/abr_testing/automation/jira_tool.py
@@ -6,77 +6,6 @@
import webbrowser
import argparse
from typing import List, Tuple
from abr_testing.data_collection import read_robot_logs, abr_google_drive, get_run_logs


def get_error_runs_from_robot(ip: str) -> List[str]:
"""Get runs that have errors from robot."""
error_run_ids = []
response = requests.get(
f"http://{ip}:31950/runs", headers={"opentrons-version": "3"}
)
run_data = response.json()
run_list = run_data["data"]
for run in run_list:
run_id = run["id"]
num_of_errors = len(run["errors"])
if not run["current"] and num_of_errors > 0:
error_run_ids.append(run_id)
return error_run_ids


def get_error_info_from_robot(
ip: str, one_run: str, storage_directory: str
) -> Tuple[str, str, str, List[str], str, str]:
"""Get error information from robot to fill out ticket."""
description = dict()
# get run information
results = get_run_logs.get_run_data(one_run, ip)
# save run information to local directory as .json file
saved_file_path = read_robot_logs.save_run_log_to_json(
ip, results, storage_directory
)

# Error Printout
(
num_of_errors,
error_type,
error_code,
error_instrument,
error_level,
) = read_robot_logs.get_error_info(results)
# JIRA Ticket Fields
failure_level = "Level " + str(error_level) + " Failure"
components = [failure_level, "Flex-RABR"]
affects_version = results["API_Version"]
parent = results.get("robot_name", "")
print(parent)
summary = parent + "_" + str(one_run) + "_" + str(error_code) + "_" + error_type
# Description of error
description["protocol_name"] = results["protocol"]["metadata"].get(
"protocolName", ""
)
description["error"] = " ".join([error_code, error_type, error_instrument])
description["protocol_step"] = list(results["commands"])[-1]
description["right_mount"] = results.get("right", "No attachment")
description["left_mount"] = results.get("left", "No attachment")
description["gripper"] = results.get("extension", "No attachment")
all_modules = abr_google_drive.get_modules(results)
whole_description = {**description, **all_modules}
whole_description_str = (
"{"
+ "\n".join("{!r}: {!r},".format(k, v) for k, v in whole_description.items())
+ "}"
)

return (
summary,
parent,
affects_version,
components,
whole_description_str,
saved_file_path,
)


class JiraTicket:
@@ -193,20 +122,6 @@ def post_attachment_to_ticket(self, issue_id: str, attachment_path: str) -> None
if __name__ == "__main__":
"""Create ticket for specified robot."""
parser = argparse.ArgumentParser(description="Pulls run logs from ABR robots.")
parser.add_argument(
"storage_directory",
metavar="STORAGE_DIRECTORY",
type=str,
nargs=1,
help="Path to long term storage directory for run logs.",
)
parser.add_argument(
"robot_ip",
metavar="ROBOT_IP",
type=str,
nargs=1,
help="IP address of robot as string.",
)
parser.add_argument(
"jira_api_token",
metavar="JIRA_API_TOKEN",
@@ -238,38 +153,9 @@ def post_attachment_to_ticket(self, issue_id: str, attachment_path: str) -> None
help="JIRA Board ID. RABR is 217",
)
args = parser.parse_args()
storage_directory = args.storage_directory[0]
ip = args.robot_ip[0]
url = "https://opentrons.atlassian.net"
api_token = args.jira_api_token[0]
email = args.email[0]
board_id = args.board_id[0]
reporter_id = args.reporter_id[0]
ticket = JiraTicket(url, api_token, email)
error_runs = get_error_runs_from_robot(ip)
one_run = error_runs[-1] # Most recent run with error.
(
summary,
robot,
affects_version,
components,
whole_description_str,
saved_file_path,
) = get_error_info_from_robot(ip, one_run, storage_directory)
print(f"Making ticket for run: {one_run} on robot {robot}.")
# TODO: make argument or see if I can get rid of with using board_id.
project_key = "RABR"
parent_key = project_key + "-" + robot[-1]
issue_url, issue_key = ticket.create_ticket(
summary,
whole_description_str,
project_key,
reporter_id,
"Bug",
"Medium",
components,
affects_version,
parent_key,
)
ticket.open_issue(issue_key)
ticket.post_attachment_to_ticket(issue_key, saved_file_path)