diff --git a/.github/workflows/app-test-build-deploy.yaml b/.github/workflows/app-test-build-deploy.yaml
index 8c3bd21503d..d3c03e1f500 100644
--- a/.github/workflows/app-test-build-deploy.yaml
+++ b/.github/workflows/app-test-build-deploy.yaml
@@ -97,7 +97,11 @@ jobs:
     strategy:
       matrix:
         os: ['windows-2022', 'ubuntu-22.04', 'macos-latest']
-    name: 'opentrons app backend unit tests on ${{matrix.os}}'
+        shell: ['app-shell', 'app-shell-odd', 'discovery-client']
+        exclude:
+          - os: 'windows-2022'
+            shell: 'app-shell-odd'
+    name: 'opentrons ${{matrix.shell}} unit tests on ${{matrix.os}}'
     timeout-minutes: 60
     runs-on: ${{ matrix.os }}
     steps:
@@ -144,7 +148,7 @@ jobs:
           yarn config set cache-folder ${{ github.workspace }}/.yarn-cache
           make setup-js
       - name: 'test native(er) packages'
-        run: make test-js-internal tests="app-shell/src app-shell-odd/src discovery-client/src" cov_opts="--coverage=true"
+        run: make test-js-internal tests="${{ matrix.shell }}/src" cov_opts="--coverage=true"
       - name: 'Upload coverage report'
         uses: 'codecov/codecov-action@v3'
         with:
diff --git a/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml b/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml
index af767b36adc..7a89bfa02dd 100644
--- a/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml
+++ b/.github/workflows/opentrons-ai-client-staging-continuous-deploy.yaml
@@ -52,6 +52,9 @@ jobs:
           yarn config set cache-folder ${{ github.workspace }}/.yarn-cache
           make setup-js
       - name: 'build'
+        env:
+          # inject dev id since this is for staging
+          OT_AI_CLIENT_MIXPANEL_ID: ${{ secrets.OT_AI_CLIENT_MIXPANEL_DEV_ID }}
         run: |
           make -C opentrons-ai-client build-staging
       - name: Configure AWS Credentials
diff --git a/.github/workflows/opentrons-ai-client-test-build-deploy.yaml b/.github/workflows/opentrons-ai-client-test.yaml
similarity index 90%
rename from .github/workflows/opentrons-ai-client-test-build-deploy.yaml
rename to .github/workflows/opentrons-ai-client-test.yaml
index 2f569d9bf78..2c5cc6cfc64 100644
--- a/.github/workflows/opentrons-ai-client-test-build-deploy.yaml
+++ b/.github/workflows/opentrons-ai-client-test.yaml
@@ -9,12 +9,9 @@ on:
     paths:
       - 'Makefile'
       - 'opentrons-ai-client/**/*'
-      - 'components/**/*'
-      - '*.js'
-      - '*.json'
-      - 'yarn.lock'
-      - '.github/workflows/app-test-build-deploy.yaml'
-      - '.github/workflows/utils.js'
+      - 'components/**'
+      - 'shared-data/**'
+      - '.github/workflows/opentrons-ai-client-test.yml'
     branches:
       - '**'
     tags:
@@ -24,10 +21,9 @@ on:
     paths:
       - 'Makefile'
       - 'opentrons-ai-client/**/*'
-      - 'components/**/*'
-      - '*.js'
-      - '*.json'
-      - 'yarn.lock'
+      - 'components/**'
+      - 'shared-data/**'
+      - '.github/workflows/opentrons-ai-client-test.yml'
   workflow_dispatch:
 concurrency:
diff --git a/.github/workflows/opentrons-ai-production-deploy.yaml b/.github/workflows/opentrons-ai-production-deploy.yaml
index 825c3561f25..2327b48ecad 100644
--- a/.github/workflows/opentrons-ai-production-deploy.yaml
+++ b/.github/workflows/opentrons-ai-production-deploy.yaml
@@ -52,6 +52,8 @@ jobs:
           yarn config set cache-folder ${{ github.workspace }}/.yarn-cache
           make setup-js
       - name: 'build'
+        env:
+          OT_AI_CLIENT_MIXPANEL_ID: ${{ secrets.OT_AI_CLIENT_MIXPANEL_ID }}
         run: |
           make -C opentrons-ai-client build-production
       - name: Configure AWS Credentials
diff --git a/abr-testing/Makefile b/abr-testing/Makefile
index f711579ff57..b9f92229177 100644
--- a/abr-testing/Makefile
+++ b/abr-testing/Makefile
@@ -88,3 +88,14 @@ push-no-restart-ot3: sdist Pipfile.lock
 
 .PHONY: push-ot3
 push-ot3:
push-no-restart-ot3 + +.PHONY: abr-setup +abr-setup: + $(python) abr_testing/tools/abr_setup.py + +.PHONY: simulate +PROTOCOL_DIR := abr_testing/protocols +SIMULATION_TOOL := protocol_simulation/abr_sim_check.py +EXTENSION := .py +simulate: + $(python) $(SIMULATION_TOOL) \ No newline at end of file diff --git a/abr-testing/abr_testing/automation/google_sheets_tool.py b/abr-testing/abr_testing/automation/google_sheets_tool.py index 3ca3bd38f9b..d284a13a241 100644 --- a/abr-testing/abr_testing/automation/google_sheets_tool.py +++ b/abr-testing/abr_testing/automation/google_sheets_tool.py @@ -167,6 +167,7 @@ def column_letter_to_index(column_letter: str) -> int: self.spread_sheet.batch_update(body=body) except gspread.exceptions.APIError as e: print(f"ERROR MESSAGE: {e}") + raise def update_cell( self, sheet_title: str, row: int, column: int, single_data: Any diff --git a/abr-testing/abr_testing/data_collection/abr_calibration_logs.py b/abr-testing/abr_testing/data_collection/abr_calibration_logs.py index 82d9d9c45bc..f25c89d8435 100644 --- a/abr-testing/abr_testing/data_collection/abr_calibration_logs.py +++ b/abr-testing/abr_testing/data_collection/abr_calibration_logs.py @@ -1,129 +1,327 @@ """Get Calibration logs from robots.""" -from typing import Dict, Any, List, Union +from typing import Dict, Any, List, Set import argparse import os import json import sys -import time as t +import traceback from abr_testing.data_collection import read_robot_logs from abr_testing.automation import google_drive_tool, google_sheets_tool -def check_for_duplicates( - sheet_location: str, - google_sheet: Any, - col_1: int, - col_2: int, - row: List[str], - headers: List[str], -) -> Union[List[str], None]: - """Check google sheet for duplicates.""" - t.sleep(5) - serials = google_sheet.get_column(col_1) - modify_dates = google_sheet.get_column(col_2) - # Check for calibration time stamp. - if row[-1] is not None: - if len(row[-1]) > 0: - for serial, modify_date in zip(serials, modify_dates): - if row[col_1 - 1] == serial and row[col_2 - 1] == modify_date: - print( - f"Skipped row for instrument {serial}. Already on Google Sheet." 
- ) - return None - read_robot_logs.write_to_sheets(sheet_location, google_sheet, row, headers) - print(f"Writing calibration for: {row[7]}") - return row - - -def upload_calibration_offsets( - calibration: Dict[str, Any], storage_directory: str -) -> None: - """Upload calibration data to google_sheet.""" - # Common Headers - headers_beg = list(calibration.keys())[:4] - headers_end = list(["X", "Y", "Z", "lastModified"]) +def instrument_helper( + headers_beg: List[str], + headers_end: List[str], + calibration_log: Dict[Any, Any], + google_sheet_name: str, + inst_sheet_serials: Set[str], + inst_sheet_modify_dates: Set[str], + storage_directory: str, +) -> List[Any]: + """Helper for parsing instrument calibration data.""" + # Populate Instruments # INSTRUMENT SHEET + instruments_upload_rows: List[Any] = [] instrument_headers = ( - headers_beg + list(calibration["Instruments"][0].keys())[:7] + headers_end + headers_beg + list(calibration_log["Instruments"][0].keys())[:7] + headers_end ) local_instrument_file = google_sheet_name + "-Instruments" - instrument_sheet_location = read_robot_logs.create_abr_data_sheet( + read_robot_logs.create_abr_data_sheet( storage_directory, local_instrument_file, instrument_headers ) # INSTRUMENTS DATA - instruments = calibration["Instruments"] + instruments = calibration_log["Instruments"] for instrument in range(len(instruments)): one_instrument = instruments[instrument] + inst_serial = one_instrument["serialNumber"] + modified = one_instrument["data"]["calibratedOffset"].get("last_modified", "") + if inst_serial in inst_sheet_serials and modified in inst_sheet_modify_dates: + continue x = one_instrument["data"]["calibratedOffset"]["offset"].get("x", "") y = one_instrument["data"]["calibratedOffset"]["offset"].get("y", "") z = one_instrument["data"]["calibratedOffset"]["offset"].get("z", "") - modified = one_instrument["data"]["calibratedOffset"].get("last_modified", "") instrument_row = ( - list(calibration.values())[:4] + list(calibration_log.values())[:4] + list(one_instrument.values())[:7] + list([x, y, z, modified]) ) - check_for_duplicates( - instrument_sheet_location, - google_sheet_instruments, - 8, - 15, - instrument_row, - instrument_headers, - ) + instruments_upload_rows.append(instrument_row) + return instruments_upload_rows + +def module_helper( + headers_beg: List[str], + headers_end: List[str], + calibration_log: Dict[Any, Any], + google_sheet_name: str, + module_sheet_serials: Set[str], + module_modify_dates: Set[str], + storage_directory: str, +) -> List[Any]: + """Helper for parsing module calibration data.""" + # Populate Modules # MODULE SHEET - if len(calibration.get("Modules", "")) > 0: + modules_upload_rows: List[Any] = [] + if len(calibration_log.get("Modules", "")) > 0: module_headers = ( - headers_beg + list(calibration["Modules"][0].keys())[:7] + headers_end + headers_beg + list(calibration_log["Modules"][0].keys())[:7] + headers_end ) local_modules_file = google_sheet_name + "-Modules" - modules_sheet_location = read_robot_logs.create_abr_data_sheet( + read_robot_logs.create_abr_data_sheet( storage_directory, local_modules_file, module_headers ) # MODULES DATA - modules = calibration["Modules"] + modules = calibration_log["Modules"] for module in range(len(modules)): one_module = modules[module] - x = one_module["moduleOffset"]["offset"].get("x", "") - y = one_module["moduleOffset"]["offset"].get("y", "") - z = one_module["moduleOffset"]["offset"].get("z", "") - modified = one_module["moduleOffset"].get("last_modified", "") + 
mod_serial = one_module["serialNumber"] + modified = "No data" + x = "" + y = "" + z = "" + try: + modified = one_module["moduleOffset"].get("last_modified", "") + x = one_module["moduleOffset"]["offset"].get("x", "") + y = one_module["moduleOffset"]["offset"].get("y", "") + z = one_module["moduleOffset"]["offset"].get("z", "") + except KeyError: + pass + if mod_serial in module_sheet_serials and modified in module_modify_dates: + continue module_row = ( - list(calibration.values())[:4] + list(calibration_log.values())[:4] + list(one_module.values())[:7] + list([x, y, z, modified]) ) - check_for_duplicates( - modules_sheet_location, - google_sheet_modules, - 8, - 15, - module_row, - module_headers, - ) + modules_upload_rows.append(module_row) + return modules_upload_rows + + +def deck_helper( + headers_beg: List[str], + headers_end: List[str], + calibration_log: Dict[Any, Any], + google_sheet_name: str, + deck_sheet_serials: Set[str], + deck_sheet_modify_dates: Set[str], + storage_directory: str, +) -> List[Any]: + """Helper for parsing deck calibration data.""" + deck_upload_rows: List[Any] = [] + # Populate Deck # DECK SHEET local_deck_file = google_sheet_name + "-Deck" deck_headers = headers_beg + list(["pipetteCalibratedWith", "Slot"]) + headers_end - deck_sheet_location = read_robot_logs.create_abr_data_sheet( + read_robot_logs.create_abr_data_sheet( storage_directory, local_deck_file, deck_headers ) # DECK DATA - deck = calibration["Deck"] - slots = ["D3", "D1", "A1"] + deck = calibration_log["Deck"] deck_modified = deck["data"].get("lastModified", "") + slots = ["D3", "D1", "A1"] pipette_calibrated_with = deck["data"].get("pipetteCalibratedWith", "") for i in range(len(deck["data"]["matrix"])): + if slots[i] in deck_sheet_serials and deck_modified in deck_sheet_modify_dates: + continue coords = deck["data"]["matrix"][i] x = coords[0] y = coords[1] z = coords[2] - deck_row = list(calibration.values())[:4] + list( + deck_row = list(calibration_log.values())[:4] + list( [pipette_calibrated_with, slots[i], x, y, z, deck_modified] ) - check_for_duplicates( - deck_sheet_location, google_sheet_deck, 6, 10, deck_row, deck_headers + deck_upload_rows.append(deck_row) + return deck_upload_rows + + +def send_batch_update( + instruments_upload_rows: List[str], + google_sheet_instruments: google_sheets_tool.google_sheet, + modules_upload_rows: List[str], + google_sheet_modules: google_sheets_tool.google_sheet, + deck_upload_rows: List[str], + google_sheet_deck: google_sheets_tool.google_sheet, +) -> None: + """Executes batch updates.""" + # Prepare for batch updates + try: + transposed_instruments_upload_rows = list( + map(list, zip(*instruments_upload_rows)) + ) + google_sheet_instruments.batch_update_cells( + transposed_instruments_upload_rows, + "A", + google_sheet_instruments.get_index_row() + 1, + "0", + ) + except Exception: + print("No new instrument data") + try: + transposed_module_upload_rows = list(map(list, zip(*modules_upload_rows))) + google_sheet_modules.batch_update_cells( + transposed_module_upload_rows, + "A", + google_sheet_modules.get_index_row() + 1, + "1020695883", + ) + except Exception: + print("No new module data") + try: + transposed_deck_upload_rows = list(map(list, zip(*deck_upload_rows))) + google_sheet_deck.batch_update_cells( + transposed_deck_upload_rows, + "A", + google_sheet_deck.get_index_row() + 1, + "1332568460", + ) + except Exception: + print("No new deck data") + + +def upload_calibration_offsets( + calibration_data: List[Dict[str, Any]], + 
storage_directory: str, + google_sheet_instruments: google_sheets_tool.google_sheet, + google_sheet_modules: google_sheets_tool.google_sheet, + google_sheet_deck: google_sheets_tool.google_sheet, + google_sheet_name: str, +) -> None: + """Upload calibration data to google_sheet.""" + # Common Headers + headers_beg = list(calibration_data[0].keys())[:4] + headers_end = list(["X", "Y", "Z", "lastModified"]) + sheets = [google_sheet_instruments, google_sheet_modules, google_sheet_deck] + instruments_upload_rows: List[Any] = [] + modules_upload_rows: List[Any] = [] + deck_upload_rows: List[Any] = [] + inst_sheet_serials: Set[str] = set() + inst_sheet_modify_dates: Set[str] = set() + module_sheet_serials: Set[str] = set() + deck_sheet_serials: Set[str] = set() + deck_sheet_modify_dates: Set[str] = set() + + # Get current serials, and modified info from google sheet + for i, sheet in enumerate(sheets): + if i == 0: + inst_sheet_serials = sheet.get_column(8) + inst_sheet_modify_dates = sheet.get_column(15) + if i == 1: + module_sheet_serials = sheet.get_column(8) + module_modify_dates = sheet.get_column(15) + elif i == 2: + deck_sheet_serials = sheet.get_column(6) + deck_sheet_modify_dates = sheet.get_column(10) + + # Go through caliration logs and deterine what should be added to the sheet + for calibration_log in calibration_data: + for sheet_ind, sheet in enumerate(sheets): + if sheet_ind == 0: + instruments_upload_rows += instrument_helper( + headers_beg, + headers_end, + calibration_log, + google_sheet_name, + inst_sheet_serials, + inst_sheet_modify_dates, + storage_directory, + ) + elif sheet_ind == 1: + modules_upload_rows += module_helper( + headers_beg, + headers_end, + calibration_log, + google_sheet_name, + module_sheet_serials, + module_modify_dates, + storage_directory, + ) + elif sheet_ind == 2: + deck_upload_rows += deck_helper( + headers_beg, + headers_end, + calibration_log, + google_sheet_name, + deck_sheet_serials, + deck_sheet_modify_dates, + storage_directory, + ) + send_batch_update( + instruments_upload_rows, + google_sheet_instruments, + modules_upload_rows, + google_sheet_modules, + deck_upload_rows, + google_sheet_deck, + ) + + +def run( + storage_directory: str, folder_name: str, google_sheet_name_param: str, email: str +) -> None: + """Main control function.""" + # Connect to google drive. 
+ google_sheet_name = google_sheet_name_param + try: + credentials_path = os.path.join(storage_directory, "credentials.json") + except FileNotFoundError: + print(f"Add credentials.json file to: {storage_directory}.") + sys.exit() + google_drive = google_drive_tool.google_drive(credentials_path, folder_name, email) + # Connect to google sheet + google_sheet_instruments = google_sheets_tool.google_sheet( + credentials_path, google_sheet_name, 0 + ) + google_sheet_modules = google_sheets_tool.google_sheet( + credentials_path, google_sheet_name, 1 + ) + google_sheet_deck = google_sheets_tool.google_sheet( + credentials_path, google_sheet_name, 2 + ) + ip_json_file = os.path.join(storage_directory, "IPs.json") + try: + ip_file = json.load(open(ip_json_file)) + except FileNotFoundError: + print(f"Add .json file with robot IPs to: {storage_directory}.") + sys.exit() + ip_or_all = "" + while not ip_or_all: + ip_or_all = input("IP Address or ALL: ") + calibration_data = [] + if ip_or_all.upper() == "ALL": + ip_address_list = ip_file["ip_address_list"] + for ip in ip_address_list: + saved_file_path, calibration = read_robot_logs.get_calibration_offsets( + ip, storage_directory + ) + calibration_data.append(calibration) + # upload_calibration_offsets(calibration, storage_directory) + else: + try: + ( + saved_file_path, + calibration, + ) = read_robot_logs.get_calibration_offsets( + ip_or_all, storage_directory + ) + calibration_data.append(calibration) + except Exception: + print("Invalid IP try again") + ip_or_all = "" + try: + upload_calibration_offsets( + calibration_data, + storage_directory, + google_sheet_instruments, + google_sheet_modules, + google_sheet_deck, + google_sheet_name, ) + print("Successfully uploaded callibration data!") + except Exception: + print("No calibration data to upload: ") + traceback.print_exc() + sys.exit(1) + google_drive.upload_missing_files(storage_directory) if __name__ == "__main__": @@ -160,42 +358,3 @@ def upload_calibration_offsets( folder_name = args.folder_name[0] google_sheet_name = args.google_sheet_name[0] email = args.email[0] - # Connect to google drive. 
- try: - credentials_path = os.path.join(storage_directory, "credentials.json") - except FileNotFoundError: - print(f"Add credentials.json file to: {storage_directory}.") - sys.exit() - google_drive = google_drive_tool.google_drive(credentials_path, folder_name, email) - # Connect to google sheet - google_sheet_instruments = google_sheets_tool.google_sheet( - credentials_path, google_sheet_name, 0 - ) - google_sheet_modules = google_sheets_tool.google_sheet( - credentials_path, google_sheet_name, 1 - ) - google_sheet_deck = google_sheets_tool.google_sheet( - credentials_path, google_sheet_name, 2 - ) - ip_json_file = os.path.join(storage_directory, "IPs.json") - try: - ip_file = json.load(open(ip_json_file)) - except FileNotFoundError: - print(f"Add .json file with robot IPs to: {storage_directory}.") - sys.exit() - ip_or_all = input("IP Address or ALL: ") - - if ip_or_all == "ALL": - ip_address_list = ip_file["ip_address_list"] - for ip in ip_address_list: - saved_file_path, calibration = read_robot_logs.get_calibration_offsets( - ip, storage_directory - ) - upload_calibration_offsets(calibration, storage_directory) - else: - saved_file_path, calibration = read_robot_logs.get_calibration_offsets( - ip_or_all, storage_directory - ) - upload_calibration_offsets(calibration, storage_directory) - - google_drive.upload_missing_files(storage_directory) diff --git a/abr-testing/abr_testing/data_collection/abr_google_drive.py b/abr-testing/abr_testing/data_collection/abr_google_drive.py index e1924e3c53e..88ed55cab82 100644 --- a/abr-testing/abr_testing/data_collection/abr_google_drive.py +++ b/abr-testing/abr_testing/data_collection/abr_google_drive.py @@ -158,38 +158,10 @@ def create_data_dictionary( return transposed_runs_and_robots, headers, transposed_runs_and_lpc, headers_lpc -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Read run logs on google drive.") - parser.add_argument( - "storage_directory", - metavar="STORAGE_DIRECTORY", - type=str, - nargs=1, - help="Path to long term storage directory for run logs.", - ) - parser.add_argument( - "folder_name", - metavar="FOLDER_NAME", - type=str, - nargs=1, - help="Google Drive folder name. Open desired folder and copy string after drive/folders/.", - ) - parser.add_argument( - "google_sheet_name", - metavar="GOOGLE_SHEET_NAME", - type=str, - nargs=1, - help="Google sheet name.", - ) - parser.add_argument( - "email", metavar="EMAIL", type=str, nargs=1, help="opentrons gmail." 
- ) - args = parser.parse_args() - folder_name = args.folder_name[0] - storage_directory = args.storage_directory[0] - google_sheet_name = args.google_sheet_name[0] - email = args.email[0] - +def run( + storage_directory: str, folder_name: str, google_sheet_name: str, email: str +) -> None: + """Main control function.""" try: credentials_path = os.path.join(storage_directory, "credentials.json") except FileNotFoundError: @@ -203,7 +175,6 @@ def create_data_dictionary( # Get run ids on google sheet run_ids_on_gs = set(google_sheet.get_column(2)) # Get robots on google sheet - robots = list(set(google_sheet.get_column(1))) # Uploads files that are not in google drive directory google_drive.upload_missing_files(storage_directory) @@ -229,7 +200,6 @@ def create_data_dictionary( hellma_plate_standards=file_values, ) start_row = google_sheet.get_index_row() + 1 - print(start_row) google_sheet.batch_update_cells(transposed_runs_and_robots, "A", start_row, "0") # Add LPC to google sheet @@ -238,6 +208,40 @@ def create_data_dictionary( google_sheet_lpc.batch_update_cells( transposed_runs_and_lpc, "A", start_row_lpc, "0" ) - robots = list(set(google_sheet.get_column(1))) # Calculate Robot Lifetimes sync_abr_sheet.determine_lifetime(google_sheet) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Read run logs on google drive.") + parser.add_argument( + "storage_directory", + metavar="STORAGE_DIRECTORY", + type=str, + nargs=1, + help="Path to long term storage directory for run logs.", + ) + parser.add_argument( + "folder_name", + metavar="FOLDER_NAME", + type=str, + nargs=1, + help="Google Drive folder name. Open desired folder and copy string after drive/folders/.", + ) + parser.add_argument( + "google_sheet_name", + metavar="GOOGLE_SHEET_NAME", + type=str, + nargs=1, + help="Google sheet name.", + ) + parser.add_argument( + "email", metavar="EMAIL", type=str, nargs=1, help="opentrons gmail." + ) + args = parser.parse_args() + folder_name = args.folder_name[0] + storage_directory = args.storage_directory[0] + google_sheet_name = args.google_sheet_name[0] + email = args.email[0] + + run(storage_directory, folder_name, google_sheet_name, email) diff --git a/abr-testing/abr_testing/data_collection/get_run_logs.py b/abr-testing/abr_testing/data_collection/get_run_logs.py index 3d8eb851197..24d5aaf4f3b 100644 --- a/abr-testing/abr_testing/data_collection/get_run_logs.py +++ b/abr-testing/abr_testing/data_collection/get_run_logs.py @@ -92,7 +92,9 @@ def save_runs(runs_to_save: Set[str], ip: str, storage_directory: str) -> Set[st return saved_file_paths -def get_all_run_logs(storage_directory: str) -> None: +def get_all_run_logs( + storage_directory: str, google_drive: google_drive_tool.google_drive +) -> None: """GET ALL RUN LOGS. Connect to each ABR robot to read run log data. 
@@ -114,6 +116,17 @@ def get_all_run_logs(storage_directory: str) -> None: google_drive.upload_missing_files(storage_directory) +def run(storage_directory: str, folder_name: str, email: str) -> None: + """Main control function.""" + try: + credentials_path = os.path.join(storage_directory, "credentials.json") + except FileNotFoundError: + print(f"Add credentials.json file to: {storage_directory}.") + sys.exit() + google_drive = google_drive_tool.google_drive(credentials_path, folder_name, email) + get_all_run_logs(storage_directory, google_drive) + + if __name__ == "__main__": """Get run logs.""" parser = argparse.ArgumentParser(description="Pulls run logs from ABR robots.") @@ -138,10 +151,4 @@ def get_all_run_logs(storage_directory: str) -> None: storage_directory = args.storage_directory[0] folder_name = args.folder_name[0] email = args.email[0] - try: - credentials_path = os.path.join(storage_directory, "credentials.json") - except FileNotFoundError: - print(f"Add credentials.json file to: {storage_directory}.") - sys.exit() - google_drive = google_drive_tool.google_drive(credentials_path, folder_name, email) - get_all_run_logs(storage_directory) + run(storage_directory, folder_name, email) diff --git a/abr-testing/abr_testing/data_collection/read_robot_logs.py b/abr-testing/abr_testing/data_collection/read_robot_logs.py index be74294fbe5..ff650335d84 100644 --- a/abr-testing/abr_testing/data_collection/read_robot_logs.py +++ b/abr-testing/abr_testing/data_collection/read_robot_logs.py @@ -13,7 +13,6 @@ import time as t import json import requests -import sys from abr_testing.tools import plate_reader @@ -695,7 +694,7 @@ def get_calibration_offsets( print(f"Connected to {ip}") except Exception: print(f"ERROR: Failed to read IP address: {ip}") - sys.exit() + raise health_data = response.json() robot_name = health_data.get("name", "") api_version = health_data.get("api_version", "") diff --git a/abr-testing/abr_testing/tools/abr_setup.py b/abr-testing/abr_testing/tools/abr_setup.py new file mode 100644 index 00000000000..853f1c53ced --- /dev/null +++ b/abr-testing/abr_testing/tools/abr_setup.py @@ -0,0 +1,139 @@ +"""Automate ABR data collection.""" +import os +import time +import configparser +import traceback +import sys +from hardware_testing.scripts import ABRAsairScript # type: ignore +from abr_testing.data_collection import ( + get_run_logs, + abr_google_drive, + abr_calibration_logs, +) + + +def run_temp_sensor(ip_file: str) -> None: + """Run temperature sensors on all robots.""" + processes = ABRAsairScript.run(ip_file) + for process in processes: + process.start() + time.sleep(20) + for process in processes: + process.join() + + +def get_abr_logs(storage_directory: str, folder_name: str, email: str) -> None: + """Retrieve run logs on all robots and record missing run logs in google drive.""" + try: + get_run_logs.run(storage_directory, folder_name, email) + except Exception as e: + print("Cannot Get Run Logs", e) + traceback.print_exc + + +def record_abr_logs( + storage_directory: str, folder_name: str, google_sheet_name: str, email: str +) -> None: + """Write run logs to ABR run logs in sheets.""" + try: + abr_google_drive.run(storage_directory, folder_name, google_sheet_name, email) + except Exception as e: + print(e) + + +def get_calibration_data( + storage_directory: str, folder_name: str, google_sheet_name: str, email: str +) -> None: + """Download calibration logs and write to ABR-calibration-data in sheets.""" + try: + abr_calibration_logs.run( + storage_directory, 
folder_name, google_sheet_name, email + ) + except Exception as e: + print("Cannot get callibration data", e) + traceback.print_exc() + + +def main(configurations: configparser.ConfigParser) -> None: + """Main function.""" + ip_file = None + storage_directory = None + email = None + drive_folder = None + sheet_name = None + + has_defaults = False + # If default is not specified get all values + default = configurations["DEFAULT"] + if len(default) > 0: + has_defaults = True + try: + if has_defaults: + storage_directory = default["Storage"] + email = default["Email"] + drive_folder = default["Drive_Folder"] + sheet_name = default["Sheet_Name"] + except KeyError as e: + print("Cannot read config file\n" + str(e)) + + # Run Temperature Sensors + if not has_defaults: + ip_file = configurations["TEMP-SENSOR"]["Robo_List"] + print("Starting temp sensors...") + if ip_file: + run_temp_sensor(ip_file) + print("Temp Sensors Started") + else: + print("Missing ip_file location, please fix configs") + sys.exit(1) + # Get Run Logs and Record + if not has_defaults: + storage_directory = configurations["RUN-LOG"]["Storage"] + email = configurations["RUN-LOG"]["Email"] + drive_folder = configurations["RUN-LOG"]["Drive_Folder"] + sheet_name = configurations["RUN-LOG"]["Sheet_Name"] + print(sheet_name) + if storage_directory and drive_folder and sheet_name and email: + print("Retrieving robot run logs...") + get_abr_logs(storage_directory, drive_folder, email) + print("Recording robot run logs...") + record_abr_logs(storage_directory, drive_folder, sheet_name, email) + print("Run logs updated") + else: + print("Storage, Email, or Drive Folder is missing, please fix configs") + sys.exit(1) + + # Collect calibration data + if not has_defaults: + storage_directory = configurations["CALIBRATION"]["Storage"] + email = configurations["CALIBRATION"]["Email"] + drive_folder = configurations["CALIBRATION"]["Drive_Folder"] + sheet_name = configurations["CALIBRATION"]["Sheet_Name"] + if storage_directory and drive_folder and sheet_name and email: + print("Retrieving and recording robot calibration data...") + get_calibration_data(storage_directory, drive_folder, sheet_name, email) + print("Calibration logs updated") + else: + print( + "Storage, Email, Drive Folder, or Sheet name is missing, please fix configs" + ) + sys.exit(1) + + +if __name__ == "__main__": + configurations = None + configs_file = None + while not configs_file: + configs_file = input("Please enter path to config.ini: ") + if os.path.exists(configs_file): + break + else: + configs_file = None + print("Please enter a valid path") + try: + configurations = configparser.ConfigParser() + configurations.read(configs_file) + except configparser.ParsingError as e: + print("Cannot read configuration file\n" + str(e)) + if configurations: + main(configurations) diff --git a/abr-testing/abr_testing/tools/sync_abr_sheet.py b/abr-testing/abr_testing/tools/sync_abr_sheet.py index aca116292a8..569f0f9b834 100644 --- a/abr-testing/abr_testing/tools/sync_abr_sheet.py +++ b/abr-testing/abr_testing/tools/sync_abr_sheet.py @@ -7,6 +7,8 @@ import csv import sys import os +import time +import traceback from typing import Dict, Tuple, Any, List from statistics import mean, StatisticsError @@ -27,76 +29,94 @@ def determine_lifetime(abr_google_sheet: Any) -> None: ) # Goes through dataframe per robot for index, run in df_sheet_data.iterrows(): - end_time = run["End_Time"] - robot = run["Robot"] - robot_lifetime = ( - float(run["Robot Lifetime (%)"]) if run["Robot Lifetime 
(%)"] != "" else 0 + max_retries = 5 + retries = 0 + while retries < max_retries: + try: + update_df(abr_google_sheet, lifetime_index, df_sheet_data, dict(run)) + break + except Exception as e: + if "Quota exceeded for quota metric" in str(e): + retries += 1 + print( + f"Read/write limit reached on attempt: {retries}, pausing then retrying..." + ) + time.sleep(65) + else: + print("unrecoverable error:", e) + traceback.print_exc() + sys.exit(1) + + +def update_df( + abr_google_sheet: Any, lifetime_index: int, df_sheet_data: Any, run: Dict[Any, Any] +) -> None: + """Update google sheets with new run log data.""" + end_time = run["End_Time"] + robot = run["Robot"] + robot_lifetime = ( + float(run["Robot Lifetime (%)"]) if run["Robot Lifetime (%)"] != "" else 0 + ) + if robot_lifetime < 1 and len(run["Run_ID"]) > 1: + # Get Robot % Lifetime + robot_runs_before = df_sheet_data[ + (df_sheet_data["End_Time"] <= end_time) & (df_sheet_data["Robot"] == robot) + ] + robot_percent_lifetime = ( + (robot_runs_before["Run_Time (min)"].sum() / 60) / 3750 * 100 ) - if robot_lifetime < 1 and len(run["Run_ID"]) > 1: - # Get Robot % Lifetime - robot_runs_before = df_sheet_data[ + # Get Left Pipette % Lifetime + left_pipette = run["Left Mount"] + if len(left_pipette) > 1: + left_pipette_runs_before = df_sheet_data[ (df_sheet_data["End_Time"] <= end_time) - & (df_sheet_data["Robot"] == robot) + & ( + (df_sheet_data["Left Mount"] == left_pipette) + | (df_sheet_data["Right Mount"] == left_pipette) + ) ] - robot_percent_lifetime = ( - (robot_runs_before["Run_Time (min)"].sum() / 60) / 3750 * 100 + left_pipette_percent_lifetime = ( + (left_pipette_runs_before["Run_Time (min)"].sum() / 60) / 1248 * 100 ) - # Get Left Pipette % Lifetime - left_pipette = run["Left Mount"] - if len(left_pipette) > 1: - left_pipette_runs_before = df_sheet_data[ - (df_sheet_data["End_Time"] <= end_time) - & ( - (df_sheet_data["Left Mount"] == left_pipette) - | (df_sheet_data["Right Mount"] == left_pipette) - ) - ] - left_pipette_percent_lifetime = ( - (left_pipette_runs_before["Run_Time (min)"].sum() / 60) / 1248 * 100 - ) - else: - left_pipette_percent_lifetime = "" - # Get Right Pipette % Lifetime - right_pipette = run["Right Mount"] - if len(right_pipette) > 1: - right_pipette_runs_before = df_sheet_data[ - (df_sheet_data["End_Time"] <= end_time) - & ( - (df_sheet_data["Left Mount"] == right_pipette) - | (df_sheet_data["Right Mount"] == right_pipette) - ) - ] - right_pipette_percent_lifetime = ( - (right_pipette_runs_before["Run_Time (min)"].sum() / 60) - / 1248 - * 100 - ) - else: - right_pipette_percent_lifetime = "" - # Get Gripper % Lifetime - gripper = run["Extension"] - if len(gripper) > 1: - gripper_runs_before = df_sheet_data[ - (df_sheet_data["End_Time"] <= end_time) - & (df_sheet_data["Extension"] == gripper) - ] - gripper_percent_lifetime = ( - (gripper_runs_before["Run_Time (min)"].sum() / 60) / 3750 * 100 + else: + left_pipette_percent_lifetime = "" + # Get Right Pipette % Lifetime + right_pipette = run["Right Mount"] + if len(right_pipette) > 1: + right_pipette_runs_before = df_sheet_data[ + (df_sheet_data["End_Time"] <= end_time) + & ( + (df_sheet_data["Left Mount"] == right_pipette) + | (df_sheet_data["Right Mount"] == right_pipette) ) - else: - gripper_percent_lifetime = "" - run_id = run["Run_ID"] - row_num = abr_google_sheet.get_row_index_with_value(run_id, 2) - update_list = [ - [robot_percent_lifetime], - [left_pipette_percent_lifetime], - [right_pipette_percent_lifetime], - [gripper_percent_lifetime], ] - 
abr_google_sheet.batch_update_cells( - update_list, lifetime_index, row_num, "0" + right_pipette_percent_lifetime = ( + (right_pipette_runs_before["Run_Time (min)"].sum() / 60) / 1248 * 100 ) - print(f"Updated row {row_num} for run: {run_id}") + else: + right_pipette_percent_lifetime = "" + # Get Gripper % Lifetime + gripper = run["Extension"] + if len(gripper) > 1: + gripper_runs_before = df_sheet_data[ + (df_sheet_data["End_Time"] <= end_time) + & (df_sheet_data["Extension"] == gripper) + ] + gripper_percent_lifetime = ( + (gripper_runs_before["Run_Time (min)"].sum() / 60) / 3750 * 100 + ) + else: + gripper_percent_lifetime = "" + run_id = run["Run_ID"] + row_num = abr_google_sheet.get_row_index_with_value(run_id, 2) + update_list = [ + [robot_percent_lifetime], + [left_pipette_percent_lifetime], + [right_pipette_percent_lifetime], + [gripper_percent_lifetime], + ] + abr_google_sheet.batch_update_cells(update_list, lifetime_index, row_num, "0") + print(f"Updated row {row_num} for run: {run_id}") def compare_run_to_temp_data( diff --git a/abr-testing/protocol_simulation/abr_sim_check.py b/abr-testing/protocol_simulation/abr_sim_check.py new file mode 100644 index 00000000000..a97a0b3692e --- /dev/null +++ b/abr-testing/protocol_simulation/abr_sim_check.py @@ -0,0 +1,33 @@ +from protocol_simulation import simulation_metrics +import os +import traceback +from pathlib import Path + +def run(file_to_simulate: Path): + protocol_name = file_to_simulate.stem + try: + simulation_metrics.main(file_to_simulate, False) + except Exception as e: + print(f"Error in protocol: {protocol_name}") + traceback.print_exc() + + + + +if __name__ == "__main__": + # Directory to search + root_dir = 'abr_testing/protocols' + + exclude = [ + '__init__.py', + 'shared_vars_and_funcs.py', + ] + # Walk through the root directory and its subdirectories + for root, dirs, files in os.walk(root_dir): + for file in files: + if file.endswith(".py"): # If it's a Python file + if file in exclude: + continue + file_path = os.path.join(root, file) + print(f"Simulating protocol: {file_path}") + run(Path(file_path)) \ No newline at end of file diff --git a/abr-testing/protocol_simulation/simulation_metrics.py b/abr-testing/protocol_simulation/simulation_metrics.py index 544bc3fb4bc..dfbba90949b 100644 --- a/abr-testing/protocol_simulation/simulation_metrics.py +++ b/abr-testing/protocol_simulation/simulation_metrics.py @@ -12,22 +12,9 @@ from typing import Set, Dict, Any, Tuple, List, Union from abr_testing.tools import plate_reader -def look_for_air_gaps(protocol_file_path: str) -> int: - instances = 0 - try: - with open(protocol_file_path, "r") as open_file: - protocol_lines = open_file.readlines() - for line in protocol_lines: - if "air_gap" in line: - print(line) - instances += 1 - print(f'Found {instances} instance(s) of the air gap function') - open_file.close() - except Exception as error: - print("Error reading protocol:", error.with_traceback()) - return instances - -def set_api_level(protocol_file_path) -> None: + + +def set_api_level(protocol_file_path: str) -> None: with open(protocol_file_path, "r") as file: file_contents = file.readlines() # Look for current'apiLevel:' @@ -47,13 +34,33 @@ def set_api_level(protocol_file_path) -> None: file.writelines(file_contents) print("File updated successfully.") -original_exit = sys.exit +def look_for_air_gaps(protocol_file_path: str) -> int: + """Search Protocol for Air Gaps""" + instances = 0 + try: + with open(protocol_file_path, "r") as open_file: + protocol_lines = 
open_file.readlines() + for line in protocol_lines: + if "air_gap" in line: + print(line) + instances += 1 + print(f'Found {instances} instance(s) of the air gap function') + open_file.close() + except Exception as error: + print("Error reading protocol:", error.with_traceback()) + return instances + -def mock_exit(code=None) -> None: +# Mock sys.exit to avoid program termination +original_exit = sys.exit # Save the original sys.exit function + +def mock_exit(code: Any = None) -> None: + """Prevents program from exiting after analyze""" print(f"sys.exit() called with code: {code}") - raise SystemExit(code) + raise SystemExit(code) # Raise the exception but catch it to prevent termination def get_labware_name(id: str, object_dict: dict, json_data: dict) -> str: + """Recursively find the labware_name""" slot = "" for obj in object_dict: if obj['id'] == id: @@ -62,6 +69,7 @@ def get_labware_name(id: str, object_dict: dict, json_data: dict) -> str: slot = obj['location']['slotName'] return " SLOT: " + slot except KeyError: + # Handle KeyError when location or slotName is missing location = obj.get('location', {}) # Check if location contains 'moduleId' @@ -74,15 +82,18 @@ def get_labware_name(id: str, object_dict: dict, json_data: dict) -> str: return " Labware not found" + def parse_results_volume(json_data_file: str) -> Tuple[ List[str], List[str], List[str], List[str], List[str], List[str], List[str], List[str], List[str], List[str], List[str] ]: + """Pars run log and extract neccessay information""" json_data = [] with open(json_data_file, "r") as json_file: json_data = json.load(json_file) commands = json_data.get("commands", []) + start_time = datetime.fromisoformat(commands[0]["createdAt"]) end_time = datetime.fromisoformat(commands[len(commands)-1]["completedAt"]) header = ["", "Protocol Name", "Date", "Time"] @@ -127,6 +138,7 @@ def parse_results_volume(json_data_file: str) -> Tuple[ "Average Liquid Probe Time (sec)", ] values_row = ["Value"] + labware_well_dict = {} hs_dict, temp_module_dict, thermo_cycler_dict, plate_reader_dict, instrument_dict = {}, {}, {}, {}, {} try: @@ -140,53 +152,52 @@ def parse_results_volume(json_data_file: str) -> Tuple[ metrics = [hs_dict, temp_module_dict, thermo_cycler_dict, plate_reader_dict, instrument_dict] - # Iterate through all the commands executed in the protocol run log for x, command in enumerate(commands): if x != 0: prev_command = commands[x-1] if command["commandType"] == "aspirate": - if not (prev_command["commandType"] == "comment" and (prev_command['params']['message'] == "AIR GAP" or prev_command['params']['message'] == "MIXING")): - labware_id = command["params"]["labwareId"] - labware_name = "" - for labware in json_data.get("labware"): - if labware["id"] == labware_id: - labware_name = (labware["loadName"]) + get_labware_name(labware["id"], json_data["labware"], json_data) - well_name = command["params"]["wellName"] + labware_id = command["params"]["labwareId"] + labware_name = "" + for labware in json_data.get("labware"): + if labware["id"] == labware_id: + labware_name = (labware["loadName"]) + get_labware_name(labware["id"], json_data["labware"], json_data) + well_name = command["params"]["wellName"] - if labware_id not in labware_well_dict: - labware_well_dict[labware_id] = {} + if labware_id not in labware_well_dict: + labware_well_dict[labware_id] = {} - if well_name not in labware_well_dict[labware_id]: - labware_well_dict[labware_id][well_name] = (labware_name, 0, 0, "") + if well_name not in labware_well_dict[labware_id]: + 
labware_well_dict[labware_id][well_name] = (labware_name, 0, 0, "") - vol = int(command["params"]["volume"]) + vol = int(command["params"]["volume"]) - labware_name, added_volumes, subtracted_volumes, log = labware_well_dict[labware_id][well_name] + labware_name, added_volumes, subtracted_volumes, log = labware_well_dict[labware_id][well_name] + + subtracted_volumes += vol + log+=(f"aspirated {vol} ") + labware_well_dict[labware_id][well_name] = (labware_name, added_volumes, subtracted_volumes, log) - subtracted_volumes += vol - log+=(f"aspirated {vol} ") - labware_well_dict[labware_id][well_name] = (labware_name, added_volumes, subtracted_volumes, log) elif command["commandType"] == "dispense": - if not (prev_command["commandType"] == "comment" and (prev_command['params']['message'] == "MIXING")): - labware_id = command["params"]["labwareId"] - labware_name = "" - for labware in json_data.get("labware"): - if labware["id"] == labware_id: - labware_name = (labware["loadName"]) + get_labware_name(labware["id"], json_data["labware"], json_data) - well_name = command["params"]["wellName"] - - if labware_id not in labware_well_dict: - labware_well_dict[labware_id] = {} - - if well_name not in labware_well_dict[labware_id]: - labware_well_dict[labware_id][well_name] = (labware_name, 0, 0, "") - - vol = int(command["params"]["volume"]) - labware_name, added_volumes, subtracted_volumes, log = labware_well_dict[labware_id][well_name] - added_volumes += vol - log+=(f"dispensed {vol} ") - labware_well_dict[labware_id][well_name] = (labware_name, added_volumes, subtracted_volumes, log) - # file_date_formatted = file_date.strftime("%Y-%m-%d_%H-%M-%S") + labware_id = command["params"]["labwareId"] + labware_name = "" + for labware in json_data.get("labware"): + if labware["id"] == labware_id: + labware_name = (labware["loadName"]) + get_labware_name(labware["id"], json_data["labware"], json_data) + well_name = command["params"]["wellName"] + + if labware_id not in labware_well_dict: + labware_well_dict[labware_id] = {} + + if well_name not in labware_well_dict[labware_id]: + labware_well_dict[labware_id][well_name] = (labware_name, 0, 0, "") + + vol = int(command["params"]["volume"]) + + labware_name, added_volumes, subtracted_volumes, log = labware_well_dict[labware_id][well_name] + + added_volumes += vol + log+=(f"dispensed {vol} ") + labware_well_dict[labware_id][well_name] = (labware_name, added_volumes, subtracted_volumes, log) with open(f"{os.path.dirname(json_data_file)}\\{protocol_name}_well_volumes_{file_date_formatted}.json", "w") as output_file: json.dump(labware_well_dict, output_file) output_file.close() @@ -224,9 +235,10 @@ def parse_results_volume(json_data_file: str) -> Tuple[ metrics_row, values_row) -def main(storage_directory, google_sheet_name, protocol_file_path): - sys.exit = mock_exit +def main(protocol_file_path: Path, save: bool, storage_directory: str = os.curdir, google_sheet_name: str = "") -> None: + """Main module control""" + sys.exit = mock_exit # Replace sys.exit with the mock function # Read file path from arguments protocol_file_path = Path(protocol_file_path) global protocol_name @@ -236,27 +248,41 @@ def main(storage_directory, google_sheet_name, protocol_file_path): file_date = datetime.now() global file_date_formatted file_date_formatted = file_date.strftime("%Y-%m-%d_%H-%M-%S") - # Prepare output file - json_file_path = f"{storage_directory}\\{protocol_name}_{file_date_formatted}.json" - json_file_output = open(json_file_path, "wb+") - error_output = 
f"{storage_directory}\\error_log" + error_output = f"{storage_directory}\\test_debug" # Run protocol simulation try: with Context(analyze) as ctx: - ctx.invoke( - analyze, - files=[protocol_file_path], - json_output=json_file_output, - human_json_output=None, - log_output=error_output, - log_level="ERROR", - check=False - ) + if save: + # Prepare output file + json_file_path = f"{storage_directory}\\{protocol_name}_{file_date_formatted}.json" + json_file_output = open(json_file_path, "wb+") + # log_output_file = f"{protocol_name}_log" + ctx.invoke( + analyze, + files=[protocol_file_path], + json_output=json_file_output, + human_json_output=None, + log_output=error_output, + log_level="ERROR", + check=False + ) + json_file_output.close() + else: + ctx.invoke( + analyze, + files=[protocol_file_path], + json_output=None, + human_json_output=None, + log_output=error_output, + log_level="ERROR", + check=True + ) + except SystemExit as e: print(f"SystemExit caught with code: {e}") finally: + # Reset sys.exit to the original behavior sys.exit = original_exit - json_file_output.close() with open(error_output, "r") as open_file: try: errors = open_file.readlines() @@ -267,32 +293,30 @@ def main(storage_directory, google_sheet_name, protocol_file_path): except: print("error simulating ...") sys.exit() + if save: + try: + credentials_path = os.path.join(storage_directory, "credentials.json") + print(credentials_path) - try: - credentials_path = os.path.join(storage_directory, "credentials.json") - print(credentials_path) - except FileNotFoundError: - print(f"Add credentials.json file to: {storage_directory}.") - sys.exit() - - global hellma_plate_standards - - try: - hellma_plate_standards = plate_reader.read_hellma_plate_files(storage_directory, 101934) - except: - print(f"Add helma plate standard files to {storage_directory}.") - sys.exit() - - google_sheet = google_sheets_tool.google_sheet( - credentials_path, google_sheet_name, 0 - ) - - google_sheet.write_to_row([]) - - for row in parse_results_volume(json_file_path): - print("Writing results to", google_sheet_name) - print(str(row)) - google_sheet.write_to_row(row) + except FileNotFoundError: + print(f"Add credentials.json file to: {storage_directory}.") + sys.exit() + + global hellma_plate_standards + try: + hellma_plate_standards = plate_reader.read_hellma_plate_files(storage_directory, 101934) + + except: + print(f"Add helma plate standard files to {storage_directory}.") + sys.exit() + google_sheet = google_sheets_tool.google_sheet( + credentials_path, google_sheet_name, 0 + ) + google_sheet.write_to_row([]) + for row in parse_results_volume(json_file_path): + print("Writing results to", google_sheet_name) + print(str(row)) + google_sheet.write_to_row(row) if __name__ == "__main__": CLEAN_PROTOCOL = True @@ -343,11 +367,13 @@ def main(storage_directory, google_sheet_name, protocol_file_path): choice = "" print("Please enter a valid response.") SETUP = False - + + # set_api_level() if CLEAN_PROTOCOL: + set_api_level(Path(protocol_file_path)) main( - storage_directory, - sheet_name, protocol_file_path, - ) + True, + storage_directory, + sheet_name,) else: sys.exit(0) \ No newline at end of file diff --git a/api-client/src/runs/types.ts b/api-client/src/runs/types.ts index 0415367f1e6..c53c589b231 100644 --- a/api-client/src/runs/types.ts +++ b/api-client/src/runs/types.ts @@ -60,6 +60,7 @@ export interface LegacyGoodRunData { export interface KnownGoodRunData extends LegacyGoodRunData { ok: true runTimeParameters: RunTimeParameter[] + 
outputFileIds: string[] } export interface KnownInvalidRunData extends LegacyGoodRunData { @@ -98,7 +99,7 @@ export interface RunsLinks { } export interface RunCommandLink { - current: CommandLinkNoMeta + lastCompleted: CommandLinkNoMeta } export interface CommandLinkNoMeta { diff --git a/api/src/opentrons/config/__init__.py b/api/src/opentrons/config/__init__.py index a4571521211..71ba78d39b0 100644 --- a/api/src/opentrons/config/__init__.py +++ b/api/src/opentrons/config/__init__.py @@ -202,6 +202,15 @@ class ConfigElement(NamedTuple): " absolute path, it will be used directly. If it is a " "relative path it will be relative to log_dir", ), + ConfigElement( + "sensor_log_file", + "Sensor Log File", + Path("logs") / "sensor.log", + ConfigElementType.FILE, + "The location of the file to save sensor logs to. If this is an" + " absolute path, it will be used directly. If it is a " + "relative path it will be relative to log_dir", + ), ConfigElement( "serial_log_file", "Serial Log File", diff --git a/api/src/opentrons/config/defaults_ot3.py b/api/src/opentrons/config/defaults_ot3.py index 08b86f16c95..55565745d3a 100644 --- a/api/src/opentrons/config/defaults_ot3.py +++ b/api/src/opentrons/config/defaults_ot3.py @@ -15,7 +15,6 @@ LiquidProbeSettings, ZSenseSettings, EdgeSenseSettings, - OutputOptions, ) @@ -27,13 +26,11 @@ plunger_speed=15, plunger_impulse_time=0.2, sensor_threshold_pascals=15, - output_option=OutputOptions.sync_buffer_to_csv, aspirate_while_sensing=False, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files={InstrumentProbeType.PRIMARY: "/data/pressure_sensor_data.csv"}, ) DEFAULT_CALIBRATION_SETTINGS: Final[OT3CalibrationSettings] = OT3CalibrationSettings( @@ -43,7 +40,6 @@ max_overrun_distance_mm=5.0, speed_mm_per_s=1.0, sensor_threshold_pf=3.0, - output_option=OutputOptions.sync_only, ), ), edge_sense=EdgeSenseSettings( @@ -54,7 +50,6 @@ max_overrun_distance_mm=0.5, speed_mm_per_s=1, sensor_threshold_pf=3.0, - output_option=OutputOptions.sync_only, ), search_initial_tolerance_mm=12.0, search_iteration_limit=8, @@ -195,23 +190,6 @@ ) -def _build_output_option_with_default( - from_conf: Any, default: OutputOptions -) -> OutputOptions: - if from_conf is None: - return default - else: - if isinstance(from_conf, OutputOptions): - return from_conf - else: - try: - enumval = OutputOptions[from_conf] - except KeyError: # not an enum entry - return default - else: - return enumval - - def _build_log_files_with_default( from_conf: Any, default: Optional[Dict[InstrumentProbeType, str]], @@ -316,24 +294,12 @@ def _build_default_cap_pass( sensor_threshold_pf=from_conf.get( "sensor_threshold_pf", default.sensor_threshold_pf ), - output_option=from_conf.get("output_option", default.output_option), ) def _build_default_liquid_probe( from_conf: Any, default: LiquidProbeSettings ) -> LiquidProbeSettings: - output_option = _build_output_option_with_default( - from_conf.get("output_option", None), default.output_option - ) - data_files: Optional[Dict[InstrumentProbeType, str]] = None - if ( - output_option is OutputOptions.sync_buffer_to_csv - or output_option is OutputOptions.stream_to_csv - ): - data_files = _build_log_files_with_default( - from_conf.get("data_files", None), default.data_files - ) return LiquidProbeSettings( mount_speed=from_conf.get("mount_speed", default.mount_speed), plunger_speed=from_conf.get("plunger_speed", default.plunger_speed), @@ -343,7 +309,6 @@ def _build_default_liquid_probe( 
sensor_threshold_pascals=from_conf.get( "sensor_threshold_pascals", default.sensor_threshold_pascals ), - output_option=from_conf.get("output_option", default.output_option), aspirate_while_sensing=from_conf.get( "aspirate_while_sensing", default.aspirate_while_sensing ), @@ -357,7 +322,6 @@ def _build_default_liquid_probe( "samples_for_baselining", default.samples_for_baselining ), sample_time_sec=from_conf.get("sample_time_sec", default.sample_time_sec), - data_files=data_files, ) diff --git a/api/src/opentrons/config/types.py b/api/src/opentrons/config/types.py index 5a6c67725d0..d35b58578ca 100644 --- a/api/src/opentrons/config/types.py +++ b/api/src/opentrons/config/types.py @@ -1,8 +1,8 @@ from enum import Enum from dataclasses import dataclass, asdict, fields -from typing import Dict, Tuple, TypeVar, Generic, List, cast, Optional +from typing import Dict, Tuple, TypeVar, Generic, List, cast from typing_extensions import TypedDict, Literal -from opentrons.hardware_control.types import OT3AxisKind, InstrumentProbeType +from opentrons.hardware_control.types import OT3AxisKind class AxisDict(TypedDict): @@ -103,25 +103,12 @@ def by_gantry_load( ) -class OutputOptions(int, Enum): - """Specifies where we should report sensor data to during a sensor pass.""" - - stream_to_csv = 0x1 # compile sensor data stream into a csv file, in addition to can_bus_only behavior - sync_buffer_to_csv = 0x2 # collect sensor data on pipette mcu, then stream to robot server and compile into a csv file, in addition to can_bus_only behavior - can_bus_only = ( - 0x4 # stream sensor data over CAN bus, in addition to sync_only behavior - ) - sync_only = 0x8 # trigger pipette sync line upon sensor's detection of something - - @dataclass(frozen=True) class CapacitivePassSettings: prep_distance_mm: float max_overrun_distance_mm: float speed_mm_per_s: float sensor_threshold_pf: float - output_option: OutputOptions - data_files: Optional[Dict[InstrumentProbeType, str]] = None @dataclass(frozen=True) @@ -135,13 +122,11 @@ class LiquidProbeSettings: plunger_speed: float plunger_impulse_time: float sensor_threshold_pascals: float - output_option: OutputOptions aspirate_while_sensing: bool z_overlap_between_passes_mm: float plunger_reset_offset: float samples_for_baselining: int sample_time_sec: float - data_files: Optional[Dict[InstrumentProbeType, str]] @dataclass(frozen=True) diff --git a/api/src/opentrons/hardware_control/backends/flex_protocol.py b/api/src/opentrons/hardware_control/backends/flex_protocol.py index 6f3299cf92d..466e7890026 100644 --- a/api/src/opentrons/hardware_control/backends/flex_protocol.py +++ b/api/src/opentrons/hardware_control/backends/flex_protocol.py @@ -15,7 +15,7 @@ from opentrons_shared_data.pipette.types import ( PipetteName, ) -from opentrons.config.types import GantryLoad, OutputOptions +from opentrons.config.types import GantryLoad from opentrons.hardware_control.types import ( BoardRevision, Axis, @@ -38,6 +38,8 @@ StatusBarState, ) from opentrons.hardware_control.module_control import AttachedModulesControl +from opentrons_hardware.firmware_bindings.constants import SensorId +from opentrons_hardware.sensors.types import SensorDataType from ..dev_types import OT3AttachedInstruments from .types import HWStopCondition @@ -152,10 +154,11 @@ async def liquid_probe( threshold_pascals: float, plunger_impulse_time: float, num_baseline_reads: int, - output_format: OutputOptions = OutputOptions.can_bus_only, - data_files: Optional[Dict[InstrumentProbeType, str]] = None, probe: 
InstrumentProbeType = InstrumentProbeType.PRIMARY, force_both_sensors: bool = False, + response_queue: Optional[ + asyncio.Queue[Dict[SensorId, List[SensorDataType]]] + ] = None, ) -> float: ... @@ -371,8 +374,6 @@ async def capacitive_probe( speed_mm_per_s: float, sensor_threshold_pf: float, probe: InstrumentProbeType = InstrumentProbeType.PRIMARY, - output_format: OutputOptions = OutputOptions.sync_only, - data_files: Optional[Dict[InstrumentProbeType, str]] = None, ) -> bool: ... diff --git a/api/src/opentrons/hardware_control/backends/ot3controller.py b/api/src/opentrons/hardware_control/backends/ot3controller.py index 84c95c8fbc4..48787e86933 100644 --- a/api/src/opentrons/hardware_control/backends/ot3controller.py +++ b/api/src/opentrons/hardware_control/backends/ot3controller.py @@ -25,7 +25,7 @@ Union, Mapping, ) -from opentrons.config.types import OT3Config, GantryLoad, OutputOptions +from opentrons.config.types import OT3Config, GantryLoad from opentrons.config import gripper_config from .ot3utils import ( axis_convert, @@ -102,7 +102,9 @@ NodeId, PipetteName as FirmwarePipetteName, ErrorCode, + SensorId, ) +from opentrons_hardware.sensors.types import SensorDataType from opentrons_hardware.firmware_bindings.messages.message_definitions import ( StopRequest, ) @@ -1368,28 +1370,14 @@ async def liquid_probe( threshold_pascals: float, plunger_impulse_time: float, num_baseline_reads: int, - output_option: OutputOptions = OutputOptions.can_bus_only, - data_files: Optional[Dict[InstrumentProbeType, str]] = None, probe: InstrumentProbeType = InstrumentProbeType.PRIMARY, force_both_sensors: bool = False, + response_queue: Optional[ + asyncio.Queue[Dict[SensorId, List[SensorDataType]]] + ] = None, ) -> float: head_node = axis_to_node(Axis.by_mount(mount)) tool = sensor_node_for_pipette(OT3Mount(mount.value)) - csv_output = bool(output_option.value & OutputOptions.stream_to_csv.value) - sync_buffer_output = bool( - output_option.value & OutputOptions.sync_buffer_to_csv.value - ) - can_bus_only_output = bool( - output_option.value & OutputOptions.can_bus_only.value - ) - data_files_transposed = ( - None - if data_files is None - else { - sensor_id_for_instrument(probe): data_files[probe] - for probe in data_files.keys() - } - ) positions = await liquid_probe( messenger=self._messenger, tool=tool, @@ -1400,12 +1388,9 @@ async def liquid_probe( threshold_pascals=threshold_pascals, plunger_impulse_time=plunger_impulse_time, num_baseline_reads=num_baseline_reads, - csv_output=csv_output, - sync_buffer_output=sync_buffer_output, - can_bus_only_output=can_bus_only_output, - data_files=data_files_transposed, sensor_id=sensor_id_for_instrument(probe), force_both_sensors=force_both_sensors, + response_queue=response_queue, ) for node, point in positions.items(): self._position.update({node: point.motor_position}) @@ -1432,41 +1417,13 @@ async def capacitive_probe( speed_mm_per_s: float, sensor_threshold_pf: float, probe: InstrumentProbeType = InstrumentProbeType.PRIMARY, - output_option: OutputOptions = OutputOptions.sync_only, - data_files: Optional[Dict[InstrumentProbeType, str]] = None, ) -> bool: - if output_option == OutputOptions.sync_buffer_to_csv: - assert ( - self._subsystem_manager.device_info[ - SubSystem.of_mount(mount) - ].revision.tertiary - == "1" - ) - csv_output = bool(output_option.value & OutputOptions.stream_to_csv.value) - sync_buffer_output = bool( - output_option.value & OutputOptions.sync_buffer_to_csv.value - ) - can_bus_only_output = bool( - output_option.value & 
OutputOptions.can_bus_only.value - ) - data_files_transposed = ( - None - if data_files is None - else { - sensor_id_for_instrument(probe): data_files[probe] - for probe in data_files.keys() - } - ) status = await capacitive_probe( messenger=self._messenger, tool=sensor_node_for_mount(mount), mover=axis_to_node(moving), distance=distance_mm, mount_speed=speed_mm_per_s, - csv_output=csv_output, - sync_buffer_output=sync_buffer_output, - can_bus_only_output=can_bus_only_output, - data_files=data_files_transposed, sensor_id=sensor_id_for_instrument(probe), relative_threshold_pf=sensor_threshold_pf, ) diff --git a/api/src/opentrons/hardware_control/backends/ot3simulator.py b/api/src/opentrons/hardware_control/backends/ot3simulator.py index 034531892d8..017c90c45b3 100644 --- a/api/src/opentrons/hardware_control/backends/ot3simulator.py +++ b/api/src/opentrons/hardware_control/backends/ot3simulator.py @@ -17,7 +17,7 @@ Mapping, ) -from opentrons.config.types import OT3Config, GantryLoad, OutputOptions +from opentrons.config.types import OT3Config, GantryLoad from opentrons.config import gripper_config from opentrons.hardware_control.module_control import AttachedModulesControl @@ -63,7 +63,8 @@ from opentrons.util.async_helpers import ensure_yield from .types import HWStopCondition from .flex_protocol import FlexBackend - +from opentrons_hardware.firmware_bindings.constants import SensorId +from opentrons_hardware.sensors.types import SensorDataType log = logging.getLogger(__name__) @@ -347,10 +348,11 @@ async def liquid_probe( threshold_pascals: float, plunger_impulse_time: float, num_baseline_reads: int, - output_format: OutputOptions = OutputOptions.can_bus_only, - data_files: Optional[Dict[InstrumentProbeType, str]] = None, probe: InstrumentProbeType = InstrumentProbeType.PRIMARY, force_both_sensors: bool = False, + response_queue: Optional[ + asyncio.Queue[Dict[SensorId, List[SensorDataType]]] + ] = None, ) -> float: z_axis = Axis.by_mount(mount) pos = self._position @@ -750,8 +752,6 @@ async def capacitive_probe( speed_mm_per_s: float, sensor_threshold_pf: float, probe: InstrumentProbeType = InstrumentProbeType.PRIMARY, - output_format: OutputOptions = OutputOptions.sync_only, - data_files: Optional[Dict[InstrumentProbeType, str]] = None, ) -> bool: self._position[moving] += distance_mm return True diff --git a/api/src/opentrons/hardware_control/ot3api.py b/api/src/opentrons/hardware_control/ot3api.py index 499592a10eb..856b755565c 100644 --- a/api/src/opentrons/hardware_control/ot3api.py +++ b/api/src/opentrons/hardware_control/ot3api.py @@ -143,7 +143,8 @@ from .backends.flex_protocol import FlexBackend from .backends.ot3simulator import OT3Simulator from .backends.errors import SubsystemUpdating - +from opentrons_hardware.firmware_bindings.constants import SensorId +from opentrons_hardware.sensors.types import SensorDataType mod_log = logging.getLogger(__name__) @@ -2643,6 +2644,9 @@ async def _liquid_probe_pass( probe: InstrumentProbeType, p_travel: float, force_both_sensors: bool = False, + response_queue: Optional[ + asyncio.Queue[Dict[SensorId, List[SensorDataType]]] + ] = None, ) -> float: plunger_direction = -1 if probe_settings.aspirate_while_sensing else 1 end_z = await self._backend.liquid_probe( @@ -2653,10 +2657,9 @@ async def _liquid_probe_pass( probe_settings.sensor_threshold_pascals, probe_settings.plunger_impulse_time, probe_settings.samples_for_baselining, - probe_settings.output_option, - probe_settings.data_files, probe=probe, force_both_sensors=force_both_sensors, + 
response_queue=response_queue, ) machine_pos = await self._backend.update_position() machine_pos[Axis.by_mount(mount)] = end_z @@ -2677,6 +2680,9 @@ async def liquid_probe( # noqa: C901 probe_settings: Optional[LiquidProbeSettings] = None, probe: Optional[InstrumentProbeType] = None, force_both_sensors: bool = False, + response_queue: Optional[ + asyncio.Queue[Dict[SensorId, List[SensorDataType]]] + ] = None, ) -> float: """Search for and return liquid level height. @@ -2802,6 +2808,8 @@ async def prep_plunger_for_probe_move( probe_settings, checked_probe, plunger_travel_mm + sensor_baseline_plunger_move_mm, + force_both_sensors, + response_queue, ) # if we made it here without an error we found the liquid error = None @@ -2870,8 +2878,6 @@ async def capacitive_probe( pass_settings.speed_mm_per_s, pass_settings.sensor_threshold_pf, probe, - pass_settings.output_option, - pass_settings.data_files, ) end_pos = await self.gantry_position(mount, refresh=True) if retract_after: diff --git a/api/src/opentrons/protocol_runner/run_orchestrator.py b/api/src/opentrons/protocol_runner/run_orchestrator.py index 697e4a14e3a..69d9feaf524 100644 --- a/api/src/opentrons/protocol_runner/run_orchestrator.py +++ b/api/src/opentrons/protocol_runner/run_orchestrator.py @@ -257,6 +257,22 @@ def get_current_command(self) -> Optional[CommandPointer]: """Get the "current" command, if any.""" return self._protocol_engine.state_view.commands.get_current() + def get_most_recently_finalized_command(self) -> Optional[CommandPointer]: + """Get the most recently finalized command, if any.""" + most_recently_finalized_command = ( + self._protocol_engine.state_view.commands.get_most_recently_finalized_command() + ) + return ( + CommandPointer( + command_id=most_recently_finalized_command.command.id, + command_key=most_recently_finalized_command.command.key, + created_at=most_recently_finalized_command.command.createdAt, + index=most_recently_finalized_command.index, + ) + if most_recently_finalized_command + else None + ) + def get_command_slice( self, cursor: Optional[int], length: int, include_fixit_commands: bool ) -> CommandSlice: diff --git a/api/src/opentrons/util/logging_config.py b/api/src/opentrons/util/logging_config.py index e9a4d2042a2..944f4d3d5ed 100644 --- a/api/src/opentrons/util/logging_config.py +++ b/api/src/opentrons/util/logging_config.py @@ -5,10 +5,13 @@ from opentrons.config import CONFIG, ARCHITECTURE, SystemArchitecture +from opentrons_hardware.sensors import SENSOR_LOG_NAME + def _host_config(level_value: int) -> Dict[str, Any]: serial_log_filename = CONFIG["serial_log_file"] api_log_filename = CONFIG["api_log_file"] + sensor_log_filename = CONFIG["sensor_log_file"] return { "version": 1, "disable_existing_loggers": False, @@ -41,6 +44,14 @@ def _host_config(level_value: int) -> Dict[str, Any]: "level": logging.DEBUG, "backupCount": 5, }, + "sensor": { + "class": "logging.handlers.RotatingFileHandler", + "formatter": "basic", + "filename": sensor_log_filename, + "maxBytes": 1000000, + "level": logging.DEBUG, + "backupCount": 5, + }, }, "loggers": { "opentrons": { @@ -66,6 +77,11 @@ def _host_config(level_value: int) -> Dict[str, Any]: "level": logging.DEBUG, "propagate": False, }, + SENSOR_LOG_NAME: { + "handlers": ["sensor"], + "level": logging.DEBUG, + "propagate": False, + }, "__main__": {"handlers": ["api"], "level": level_value}, }, } @@ -75,6 +91,7 @@ def _buildroot_config(level_value: int) -> Dict[str, Any]: # Import systemd.journald here since it is generally unavailble on non # linux 
systems and we probably don't want to use it on linux desktops # either + sensor_log_filename = CONFIG["sensor_log_file"] return { "version": 1, "disable_existing_loggers": False, @@ -106,6 +123,14 @@ def _buildroot_config(level_value: int) -> Dict[str, Any]: "formatter": "message_only", "SYSLOG_IDENTIFIER": "opentrons-api-serial-usbbin", }, + "sensor": { + "class": "logging.handlers.RotatingFileHandler", + "formatter": "basic", + "filename": sensor_log_filename, + "maxBytes": 1000000, + "level": logging.DEBUG, + "backupCount": 3, + }, }, "loggers": { "opentrons.drivers.asyncio.communication.serial_connection": { @@ -131,6 +156,11 @@ def _buildroot_config(level_value: int) -> Dict[str, Any]: "level": logging.DEBUG, "propagate": False, }, + SENSOR_LOG_NAME: { + "handlers": ["sensor"], + "level": logging.DEBUG, + "propagate": False, + }, "__main__": {"handlers": ["api"], "level": level_value}, }, } diff --git a/api/tests/opentrons/config/ot3_settings.py b/api/tests/opentrons/config/ot3_settings.py index 38353c05a3c..04370fd6c09 100644 --- a/api/tests/opentrons/config/ot3_settings.py +++ b/api/tests/opentrons/config/ot3_settings.py @@ -1,5 +1,3 @@ -from opentrons.config.types import OutputOptions - ot3_dummy_settings = { "name": "Marie Curie", "model": "OT-3 Standard", @@ -122,13 +120,11 @@ "plunger_speed": 10, "plunger_impulse_time": 0.2, "sensor_threshold_pascals": 17, - "output_option": OutputOptions.stream_to_csv, "aspirate_while_sensing": False, "z_overlap_between_passes_mm": 0.1, "plunger_reset_offset": 2.0, "samples_for_baselining": 20, "sample_time_sec": 0.004, - "data_files": {"PRIMARY": "/data/pressure_sensor_data.csv"}, }, "calibration": { "z_offset": { @@ -137,8 +133,6 @@ "max_overrun_distance_mm": 2, "speed_mm_per_s": 3, "sensor_threshold_pf": 4, - "output_option": OutputOptions.sync_only, - "data_files": None, }, }, "edge_sense": { @@ -149,8 +143,6 @@ "max_overrun_distance_mm": 5, "speed_mm_per_s": 6, "sensor_threshold_pf": 7, - "output_option": OutputOptions.sync_only, - "data_files": None, }, "search_initial_tolerance_mm": 18, "search_iteration_limit": 3, diff --git a/api/tests/opentrons/hardware_control/backends/test_ot3_controller.py b/api/tests/opentrons/hardware_control/backends/test_ot3_controller.py index ac25d19a3e2..5ffee581de4 100644 --- a/api/tests/opentrons/hardware_control/backends/test_ot3_controller.py +++ b/api/tests/opentrons/hardware_control/backends/test_ot3_controller.py @@ -39,7 +39,6 @@ OT3Config, GantryLoad, LiquidProbeSettings, - OutputOptions, ) from opentrons.config.robot_configs import build_config_ot3 from opentrons_hardware.firmware_bindings.arbitration_id import ArbitrationId @@ -61,7 +60,6 @@ UpdateState, EstopState, CurrentConfig, - InstrumentProbeType, ) from opentrons.hardware_control.errors import ( InvalidPipetteName, @@ -180,13 +178,11 @@ def fake_liquid_settings() -> LiquidProbeSettings: plunger_speed=10, plunger_impulse_time=0.2, sensor_threshold_pascals=15, - output_option=OutputOptions.can_bus_only, aspirate_while_sensing=False, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files={InstrumentProbeType.PRIMARY: "fake_file_name"}, ) @@ -707,6 +703,17 @@ async def test_ready_for_movement( assert controller.check_motor_status(axes) == ready +def probe_move_group_run_side_effect( + head: NodeId, tool: NodeId +) -> Iterator[Dict[NodeId, MotorPositionStatus]]: + """Return homed position for axis that is present and was commanded to home.""" + positions = { + head: 
MotorPositionStatus(0.0, 0.0, True, True, MoveCompleteAck(1)), + tool: MotorPositionStatus(0.0, 0.0, True, True, MoveCompleteAck(1)), + } + yield positions + + @pytest.mark.parametrize("mount", [OT3Mount.LEFT, OT3Mount.RIGHT]) async def test_liquid_probe( mount: OT3Mount, @@ -716,6 +723,11 @@ async def test_liquid_probe( mock_send_stop_threshold: mock.AsyncMock, ) -> None: fake_max_p_dist = 70 + head_node = axis_to_node(Axis.by_mount(mount)) + tool_node = sensor_node_for_mount(mount) + mock_move_group_run.side_effect = probe_move_group_run_side_effect( + head_node, tool_node + ) try: await controller.liquid_probe( mount=mount, @@ -725,18 +737,17 @@ async def test_liquid_probe( threshold_pascals=fake_liquid_settings.sensor_threshold_pascals, plunger_impulse_time=fake_liquid_settings.plunger_impulse_time, num_baseline_reads=fake_liquid_settings.samples_for_baselining, - output_option=fake_liquid_settings.output_option, ) except PipetteLiquidNotFoundError: # the move raises a liquid not found now since we don't call the move group and it doesn't # get any positions back pass move_groups = mock_move_group_run.call_args_list[0][0][0]._move_groups - head_node = axis_to_node(Axis.by_mount(mount)) - tool_node = sensor_node_for_mount(mount) # in tool_sensors, pipette moves down, then sensor move goes assert move_groups[0][0][tool_node].stop_condition == MoveStopCondition.none - assert move_groups[1][0][tool_node].stop_condition == MoveStopCondition.sync_line + assert ( + move_groups[1][0][tool_node].stop_condition == MoveStopCondition.sensor_report + ) assert len(move_groups) == 2 assert move_groups[0][0][tool_node] assert move_groups[1][0][head_node], move_groups[2][0][tool_node] diff --git a/api/tests/opentrons/hardware_control/test_ot3_api.py b/api/tests/opentrons/hardware_control/test_ot3_api.py index 3c574e4373a..064ea087c6b 100644 --- a/api/tests/opentrons/hardware_control/test_ot3_api.py +++ b/api/tests/opentrons/hardware_control/test_ot3_api.py @@ -1,5 +1,6 @@ """ Tests for behaviors specific to the OT3 hardware controller. 
""" +import asyncio from typing import ( AsyncIterator, Iterator, @@ -26,7 +27,6 @@ GantryLoad, CapacitivePassSettings, LiquidProbeSettings, - OutputOptions, ) from opentrons.hardware_control.dev_types import ( AttachedGripper, @@ -98,6 +98,8 @@ from opentrons.hardware_control.module_control import AttachedModulesControl from opentrons.hardware_control.backends.types import HWStopCondition +from opentrons_hardware.firmware_bindings.constants import SensorId +from opentrons_hardware.sensors.types import SensorDataType # TODO (spp, 2023-08-22): write tests for ot3api.stop & ot3api.halt @@ -109,7 +111,6 @@ def fake_settings() -> CapacitivePassSettings: max_overrun_distance_mm=2, speed_mm_per_s=4, sensor_threshold_pf=1.0, - output_option=OutputOptions.sync_only, ) @@ -120,13 +121,11 @@ def fake_liquid_settings() -> LiquidProbeSettings: plunger_speed=15, plunger_impulse_time=0.2, sensor_threshold_pascals=15, - output_option=OutputOptions.can_bus_only, aspirate_while_sensing=False, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files={InstrumentProbeType.PRIMARY: "fake_file_name"}, ) @@ -488,8 +487,6 @@ def _update_position( speed_mm_per_s: float, threshold_pf: float, probe: InstrumentProbeType, - output_option: OutputOptions = OutputOptions.sync_only, - data_file: Optional[str] = None, ) -> None: hardware_backend._position[moving] += distance_mm / 2 @@ -827,13 +824,11 @@ async def test_liquid_probe( plunger_speed=15, plunger_impulse_time=0.2, sensor_threshold_pascals=15, - output_option=OutputOptions.can_bus_only, aspirate_while_sensing=True, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files={InstrumentProbeType.PRIMARY: "fake_file_name"}, ) fake_max_z_dist = 10.0 non_responsive_z_mm = ot3_hardware.liquid_probe_non_responsive_z_distance( @@ -860,10 +855,9 @@ async def test_liquid_probe( fake_settings_aspirate.sensor_threshold_pascals, fake_settings_aspirate.plunger_impulse_time, fake_settings_aspirate.samples_for_baselining, - fake_settings_aspirate.output_option, - fake_settings_aspirate.data_files, probe=InstrumentProbeType.PRIMARY, force_both_sensors=False, + response_queue=None, ) await ot3_hardware.liquid_probe( @@ -1098,13 +1092,11 @@ async def test_multi_liquid_probe( plunger_speed=71.5, plunger_impulse_time=0.2, sensor_threshold_pascals=15, - output_option=OutputOptions.can_bus_only, aspirate_while_sensing=True, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files={InstrumentProbeType.PRIMARY: "fake_file_name"}, ) fake_max_z_dist = 10.0 await ot3_hardware.liquid_probe( @@ -1119,10 +1111,9 @@ async def test_multi_liquid_probe( fake_settings_aspirate.sensor_threshold_pascals, fake_settings_aspirate.plunger_impulse_time, fake_settings_aspirate.samples_for_baselining, - fake_settings_aspirate.output_option, - fake_settings_aspirate.data_files, probe=InstrumentProbeType.PRIMARY, force_both_sensors=False, + response_queue=None, ) assert mock_liquid_probe.call_count == 3 @@ -1155,10 +1146,11 @@ async def _fake_pos_update_and_raise( threshold_pascals: float, plunger_impulse_time: float, num_baseline_reads: int, - output_format: OutputOptions = OutputOptions.can_bus_only, - data_files: Optional[Dict[InstrumentProbeType, str]] = None, probe: InstrumentProbeType = InstrumentProbeType.PRIMARY, force_both_sensors: bool = False, + response_queue: Optional[ + asyncio.Queue[Dict[SensorId, 
List[SensorDataType]]] + ] = None, ) -> float: pos = self._position pos[Axis.by_mount(mount)] += mount_speed * ( @@ -1176,13 +1168,11 @@ async def _fake_pos_update_and_raise( plunger_speed=71.5, plunger_impulse_time=0.2, sensor_threshold_pascals=15, - output_option=OutputOptions.can_bus_only, aspirate_while_sensing=True, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files={InstrumentProbeType.PRIMARY: "fake_file_name"}, ) # with a mount speed of 5, pass overlap of 0.5 and a 0.2s delay on z # the actual distance traveled is 3.5mm per pass @@ -1233,8 +1223,6 @@ async def test_capacitive_probe( 4, 1.0, InstrumentProbeType.PRIMARY, - fake_settings.output_option, - fake_settings.data_files, ) original = moving.set_in_point(here, 0) diff --git a/app-shell-odd/src/__tests__/http.test.ts b/app-shell-odd/src/__tests__/http.test.ts index 7b2c72578c0..c7ea4443a96 100644 --- a/app-shell-odd/src/__tests__/http.test.ts +++ b/app-shell-odd/src/__tests__/http.test.ts @@ -9,6 +9,7 @@ import type { Request, Response } from 'node-fetch' vi.mock('../config') vi.mock('node-fetch') +vi.mock('../log') describe('app-shell main http module', () => { beforeEach(() => { diff --git a/app-shell-odd/src/__tests__/update.test.ts b/app-shell-odd/src/__tests__/update.test.ts deleted file mode 100644 index 26adb67684b..00000000000 --- a/app-shell-odd/src/__tests__/update.test.ts +++ /dev/null @@ -1,47 +0,0 @@ -// app-shell self-update tests -import { when } from 'vitest-when' -import { describe, it, vi, beforeEach, afterEach, expect } from 'vitest' -import * as http from '../http' -import { registerUpdate, FLEX_MANIFEST_URL } from '../update' -import * as Cfg from '../config' - -import type { Dispatch } from '../types' - -vi.unmock('electron-updater') -vi.mock('electron-updater') -vi.mock('../log') -vi.mock('../config') -vi.mock('../http') -vi.mock('fs-extra') - -describe('update', () => { - let dispatch: Dispatch - let handleAction: Dispatch - - beforeEach(() => { - dispatch = vi.fn() - handleAction = registerUpdate(dispatch) - }) - - afterEach(() => { - vi.resetAllMocks() - }) - - it('handles shell:CHECK_UPDATE with available update', () => { - when(vi.mocked(Cfg.getConfig)) - // @ts-expect-error getConfig mock not recognizing correct type overload - .calledWith('update') - .thenReturn({ - channel: 'latest', - } as any) - - when(vi.mocked(http.fetchJson)) - .calledWith(FLEX_MANIFEST_URL) - .thenResolve({ production: { '5.0.0': {}, '6.0.0': {} } }) - handleAction({ type: 'shell:CHECK_UPDATE', meta: { shell: true } }) - - expect(vi.mocked(Cfg.getConfig)).toHaveBeenCalledWith('update') - - expect(vi.mocked(http.fetchJson)).toHaveBeenCalledWith(FLEX_MANIFEST_URL) - }) -}) diff --git a/app-shell-odd/src/actions.ts b/app-shell-odd/src/actions.ts index 588dc88b3e4..bb7c0450210 100644 --- a/app-shell-odd/src/actions.ts +++ b/app-shell-odd/src/actions.ts @@ -119,6 +119,7 @@ import type { export const configInitialized = (config: Config): ConfigInitializedAction => ({ type: CONFIG_INITIALIZED, payload: { config }, + meta: { shell: true }, }) // config value has been updated @@ -128,6 +129,7 @@ export const configValueUpdated = ( ): ConfigValueUpdatedAction => ({ type: VALUE_UPDATED, payload: { path, value }, + meta: { shell: true }, }) export const customLabwareList = ( diff --git a/app-shell-odd/src/config/index.ts b/app-shell-odd/src/config/index.ts index df8e0cf317d..a67655976d9 100644 --- a/app-shell-odd/src/config/index.ts +++ 
b/app-shell-odd/src/config/index.ts @@ -5,7 +5,6 @@ import get from 'lodash/get' import forEach from 'lodash/forEach' import mergeOptions from 'merge-options' import yargsParser from 'yargs-parser' - import { UI_INITIALIZED } from '../constants' import * as Cfg from '../constants' import { configInitialized, configValueUpdated } from '../actions' @@ -13,6 +12,7 @@ import systemd from '../systemd' import { createLogger } from '../log' import { DEFAULTS_V12, migrate } from './migrate' import { shouldUpdate, getNextValue } from './update' +import { setUserDataPath } from '../early' import type { ConfigV12, @@ -24,8 +24,6 @@ import type { Config, Overrides } from './types' export * from './types' -export const ODD_DIR = '/data/ODD' - // make sure all arguments are included in production const argv = process.argv0.endsWith('defaultApp') ? process.argv.slice(2) @@ -48,8 +46,7 @@ const store = (): Store => { // perform store migration if loading for the first time _store = (new Store({ defaults: DEFAULTS_V12, - // dont overwrite config dir if in dev mode because it causes issues - ...(process.env.NODE_ENV === 'production' && { cwd: ODD_DIR }), + cwd: setUserDataPath(), }) as unknown) as Store _store.store = migrate((_store.store as unknown) as ConfigV12) } @@ -66,7 +63,14 @@ const log = (): Logger => _log ?? (_log = createLogger('config')) export function registerConfig(dispatch: Dispatch): (action: Action) => void { return function handleIncomingAction(action: Action) { if (action.type === UI_INITIALIZED) { + log().info('initializing configuration') dispatch(configInitialized(getFullConfig())) + log().info( + `flow route: ${ + getConfig('onDeviceDisplaySettings').unfinishedUnboxingFlowRoute + }` + ) + log().info('configuration initialized') } else if ( action.type === Cfg.UPDATE_VALUE || action.type === Cfg.RESET_VALUE || @@ -120,8 +124,8 @@ export function getOverrides(path?: string): unknown { return path != null ? get(overrides(), path) : overrides() } -export function getConfig
<P extends keyof Config>(path: P): Config[P] export function getConfig(): Config +export function getConfig<P extends keyof Config>
(path: P): Config[P] export function getConfig(path?: any): any { const result = store().get(path) const over = getOverrides(path as string | undefined) diff --git a/app-shell-odd/src/constants.ts b/app-shell-odd/src/constants.ts index a78e9274ae0..8b92e639cf6 100644 --- a/app-shell-odd/src/constants.ts +++ b/app-shell-odd/src/constants.ts @@ -257,3 +257,5 @@ export const FAILURE_STATUSES = { } as const export const SEND_FILE_PATHS: 'shell:SEND_FILE_PATHS' = 'shell:SEND_FILE_PATHS' + +export const ODD_DATA_DIR = '/data/ODD' diff --git a/app-shell-odd/src/early.ts b/app-shell-odd/src/early.ts new file mode 100644 index 00000000000..134c8957804 --- /dev/null +++ b/app-shell-odd/src/early.ts @@ -0,0 +1,22 @@ +// things intended to execute early in app-shell initialization +// do as little as possible in this file and do none of it at import time + +import { app } from 'electron' +import { ODD_DATA_DIR } from './constants' + +let path: string + +export const setUserDataPath = (): string => { + if (path == null) { + console.log( + `node env is ${process.env.NODE_ENV}, path is ${app.getPath('userData')}` + ) + if (process.env.NODE_ENV === 'production') { + console.log(`setting app path to ${ODD_DATA_DIR}`) + app.setPath('userData', ODD_DATA_DIR) + } + path = app.getPath('userData') + console.log(`app path becomes ${app.getPath('userData')}`) + } + return app.getPath('userData') +} diff --git a/app-shell-odd/src/http.ts b/app-shell-odd/src/http.ts index 6392340fbe7..90d01530da8 100644 --- a/app-shell-odd/src/http.ts +++ b/app-shell-odd/src/http.ts @@ -7,10 +7,13 @@ import FormData from 'form-data' import { Transform } from 'stream' import { HTTP_API_VERSION } from './constants' +import { createLogger } from './log' import type { Readable } from 'stream' import type { Request, RequestInit, Response } from 'node-fetch' +const log = createLogger('http') + type RequestInput = Request | string export interface DownloadProgress { @@ -18,6 +21,16 @@ export interface DownloadProgress { size: number | null } +export class LocalAbortError extends Error { + declare readonly name: 'LocalAbortError' + declare readonly type: 'aborted' + constructor(message: string) { + super(message) + this.name = 'LocalAbortError' + this.type = 'aborted' + } +} + export function fetch( input: RequestInput, init?: RequestInit @@ -35,21 +48,29 @@ export function fetch( }) } -export function fetchJson(input: RequestInput): Promise { - return fetch(input).then(response => response.json()) +export function fetchJson( + input: RequestInput, + init?: RequestInit +): Promise { + return fetch(input, init).then(response => response.json()) +} + +export function fetchText(input: Request, init?: RequestInit): Promise { + return fetch(input, init).then(response => response.text()) } -export function fetchText(input: Request): Promise { - return fetch(input).then(response => response.text()) +export interface FetchToFileOptions { + onProgress: (progress: DownloadProgress) => unknown + signal: AbortSignal } // TODO(mc, 2019-07-02): break this function up and test its components export function fetchToFile( input: RequestInput, destination: string, - options?: Partial<{ onProgress: (progress: DownloadProgress) => unknown }> + options?: Partial ): Promise { - return fetch(input).then(response => { + return fetch(input, { signal: options?.signal }).then(response => { let downloaded = 0 const size = Number(response.headers.get('Content-Length')) || null @@ -75,13 +96,26 @@ export function fetchToFile( // pump calls stream.pipe, handles teardown 
if streams error, and calls // its callbacks when the streams are done pump(inputStream, progressReader, outputStream, error => { - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - if (error) { + const handleError = (problem: Error): void => { // if we error out, delete the temp dir to clean up - return remove(destination).then(() => { + log.error(`Aborting fetchToFile: ${problem.name}: ${problem.message}`) + remove(destination).then(() => { reject(error) }) } + const listener = (): void => { + handleError( + new LocalAbortError( + (options?.signal?.reason as string | null) ?? 'aborted' + ) + ) + } + options?.signal?.addEventListener('abort', listener, { once: true }) + // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions + if (error) { + handleError(error) + } + options?.signal?.removeEventListener('abort', listener, {}) resolve(destination) }) }) diff --git a/app-shell-odd/src/log.ts b/app-shell-odd/src/log.ts index 0c6a087be3f..100c7f275fb 100644 --- a/app-shell-odd/src/log.ts +++ b/app-shell-odd/src/log.ts @@ -4,13 +4,13 @@ import path from 'path' import dateFormat from 'dateformat' import winston from 'winston' +import { setUserDataPath } from './early' import { getConfig } from './config' import type Transport from 'winston-transport' import type { Config } from './config' -const ODD_DIR = '/data/ODD' -const LOG_DIR = path.join(ODD_DIR, 'logs') +const LOG_DIR = path.join(setUserDataPath(), 'logs') const ERROR_LOG = path.join(LOG_DIR, 'error.log') const COMBINED_LOG = path.join(LOG_DIR, 'combined.log') diff --git a/app-shell-odd/src/main.ts b/app-shell-odd/src/main.ts index d271bb1dc87..b0f285fa194 100644 --- a/app-shell-odd/src/main.ts +++ b/app-shell-odd/src/main.ts @@ -6,11 +6,7 @@ import path from 'path' import { createUi, waitForRobotServerAndShowMainWindow } from './ui' import { createLogger } from './log' import { registerDiscovery } from './discovery' -import { - registerUpdate, - updateLatestVersion, - registerUpdateBrightness, -} from './update' +import { registerUpdateBrightness } from './system' import { registerRobotSystemUpdate } from './system-update' import { registerAppRestart } from './restart' import { @@ -19,7 +15,6 @@ import { getOverrides, registerConfig, resetStore, - ODD_DIR, } from './config' import systemd from './systemd' import { registerDataFiles, watchForMassStorage } from './usb' @@ -28,7 +23,9 @@ import { establishBrokerConnection, closeBrokerConnection, } from './notifications' +import { setUserDataPath } from './early' +import type { OTLogger } from './log' import type { BrowserWindow } from 'electron' import type { Action, Dispatch, Logger } from './types' import type { LogEntry } from 'winston' @@ -39,6 +36,7 @@ import type { LogEntry } from 'winston' * https://github.com/node-fetch/node-fetch/issues/1624 */ dns.setDefaultResultOrder('ipv4first') +setUserDataPath() systemd.sendStatus('starting app') const config = getConfig() @@ -87,12 +85,14 @@ function startUp(): void { log.info('Starting App') console.log('Starting App') const storeNeedsReset = fse.existsSync( - path.join(ODD_DIR, `_CONFIG_TO_BE_DELETED_ON_REBOOT`) + path.join(setUserDataPath(), `_CONFIG_TO_BE_DELETED_ON_REBOOT`) ) if (storeNeedsReset) { log.debug('store marked to be reset, resetting store') resetStore() - fse.removeSync(path.join(ODD_DIR, `_CONFIG_TO_BE_DELETED_ON_REBOOT`)) + fse.removeSync( + path.join(app.getPath('userData'), `_CONFIG_TO_BE_DELETED_ON_REBOOT`) + ) } systemd.sendStatus('loading app') 
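// Illustrative sketch, not part of the patch above: how the new early.ts helper is meant to be consumed.
// setUserDataPath() memoizes on its first call and only redirects userData to /data/ODD in production,
// so any module that needs a path under userData calls it before resolving, and whichever module loads
// first performs the redirect. The usages below are condensed from log.ts and config/index.ts in this diff.
import path from 'path'
import { setUserDataPath } from './early'

// log.ts pattern: derive the log directory from the (possibly redirected) userData path.
const LOG_DIR = path.join(setUserDataPath(), 'logs')

// config/index.ts pattern: hand the same directory to electron-store as its working directory,
// e.g. new Store({ defaults: DEFAULTS_V12, cwd: setUserDataPath() })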
process.on('uncaughtException', error => log.error('Uncaught: ', { error })) @@ -102,11 +102,28 @@ function startUp(): void { // wire modules to UI dispatches const dispatch: Dispatch = action => { - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - if (mainWindow) { - log.silly('Sending action via IPC to renderer', { action }) - mainWindow.webContents.send('dispatch', action) - } + // This function now dispatches actions to all the handlers in the app shell. That would make it + // vulnerable to infinite recursion: + // - handler handles action A + // - handler dispatches action A as a response (calls this function) + // - this function calls handler with action A + // By deferring to nextTick(), we would still be executing the code over and over but we should have + // broken the stack. + process.nextTick(() => { + // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions + if (mainWindow) { + log.silly('Sending action via IPC to renderer', { action }) + mainWindow.webContents.send('dispatch', action) + } + log.debug( + `bouncing action ${action.type} to ${actionHandlers.length} handlers` + ) + // Make actions that are sourced from the shell also go to the app shell without needing + // round tripping. This call is the reason for the nextTick() above. + actionHandlers.forEach(handler => { + handler(action) + }) + }) } mainWindow = createUi(dispatch) @@ -114,15 +131,9 @@ function startUp(): void { void establishBrokerConnection() mainWindow.once('closed', () => (mainWindow = null)) - log.info('Fetching latest software version') - updateLatestVersion().catch((error: Error) => { - log.error('Error fetching latest software version: ', { error }) - }) - const actionHandlers: Dispatch[] = [ registerConfig(dispatch), registerDiscovery(dispatch), - registerUpdate(dispatch), registerRobotSystemUpdate(dispatch), registerAppRestart(), registerUpdateBrightness(), @@ -143,8 +154,19 @@ function startUp(): void { log.info('First dispatch, showing') systemd.sendStatus('started') systemd.ready() - const stopWatching = watchForMassStorage(dispatch) - ipcMain.once('quit', stopWatching) + try { + const stopWatching = watchForMassStorage(dispatch) + ipcMain.once('quit', stopWatching) + } catch (err: any) { + if (err instanceof Error) { + console.log( + `Failed to watch for mass storage: ${err.name}: ${err.message}`, + err + ) + } else { + console.log(`Failed to watch for mass storage: ${err}`) + } + } // TODO: This is where we render the main window for the first time. See ui.ts // in the createUI function for more. 
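// Illustrative sketch, separate from the diff: the recursion concern described in the nextTick() comment
// inside dispatch() above. Fanning actions out to the shell's own handlers means a handler that keeps
// re-dispatching in response to its own dispatches would re-enter dispatch() before the current call
// returned and grow the stack without bound. Deferring with process.nextTick() does not stop a handler
// from looping forever, but each hop now starts from a fresh stack frame queued on the event loop.
// The action types and handler below are hypothetical.
type Action = { type: string }
const actionHandlers: Array<(action: Action) => void> = []

function dispatch(action: Action): void {
  process.nextTick(() => {
    actionHandlers.forEach(handler => {
      handler(action)
    })
  })
}

// A handler that answers one action with another no longer recurses synchronously.
actionHandlers.push(action => {
  if (action.type === 'shell:PING') {
    dispatch({ type: 'shell:PONG' })
  }
})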
if (!!!mainWindow) { @@ -155,7 +177,7 @@ function startUp(): void { }) } -function createRendererLogger(): Logger { +function createRendererLogger(): OTLogger { log.info('Creating renderer logger') const logger = createLogger('renderer') diff --git a/app-shell-odd/src/system-update/__tests__/handler.test.ts b/app-shell-odd/src/system-update/__tests__/handler.test.ts new file mode 100644 index 00000000000..65769c93729 --- /dev/null +++ b/app-shell-odd/src/system-update/__tests__/handler.test.ts @@ -0,0 +1,777 @@ +// app-shell self-update tests +import { when } from 'vitest-when' +import { rm } from 'fs-extra' +import { describe, it, vi, beforeEach, afterEach, expect } from 'vitest' +import tempy from 'tempy' + +import * as Cfg from '../../config' +import { CONFIG_INITIALIZED, VALUE_UPDATED } from '../../constants' +import { + manageDriver, + createUpdateDriver, + CURRENT_SYSTEM_VERSION, +} from '../handler' +import { FLEX_MANIFEST_URL } from '../constants' +import { getSystemUpdateDir as _getSystemUpdateDir } from '../directories' +import { getProvider as _getWebProvider } from '../from-web' +import { getProvider as _getUsbProvider } from '../from-usb' + +import type { UpdateProvider } from '../types' +import type { UpdateDriver } from '../handler' +import type { WebUpdateSource } from '../from-web' +import type { USBUpdateSource } from '../from-usb' +import type { Dispatch } from '../../types' + +import type { + ConfigInitializedAction, + ConfigValueUpdatedAction, +} from '@opentrons/app/src/redux/config' + +vi.unmock('electron-updater') // ? +vi.mock('electron-updater') +vi.mock('../../log') +vi.mock('../../config') +vi.mock('../../http') +vi.mock('../directories') +vi.mock('../from-web') +vi.mock('../from-usb') + +const getSystemUpdateDir = vi.mocked(_getSystemUpdateDir) +const getConfig = vi.mocked(Cfg.getConfig) +const getWebProvider = vi.mocked(_getWebProvider) +const getUsbProvider = vi.mocked(_getUsbProvider) + +describe('update driver manager', () => { + let dispatch: Dispatch + let testDir: string = '' + beforeEach(() => { + const thisTd = tempy.directory() + testDir = thisTd + dispatch = vi.fn() + when(getSystemUpdateDir).calledWith().thenReturn(thisTd) + }) + + afterEach(() => { + vi.resetAllMocks() + const oldTd = testDir + testDir = '' + return oldTd === '' + ? 
new Promise(resolve => resolve()) + : rm(oldTd, { recursive: true, force: true }) + }) + + it('creates a driver once config is loaded', () => { + when(getConfig) + .calledWith('update') + .thenReturn(({ channel: 'alpha' } as any) as Cfg.Config['update']) + const driver = manageDriver(dispatch) + expect(driver.getUpdateDriver()).toBeNull() + expect(getConfig).not.toHaveBeenCalled() + return driver + .handleAction({ + type: CONFIG_INITIALIZED, + } as ConfigInitializedAction) + .then(() => { + expect(driver.getUpdateDriver()).not.toBeNull() + expect(getConfig).toHaveBeenCalledOnce() + expect(getWebProvider).toHaveBeenCalledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + }) + }) + + it('reloads the web driver when appropriate', () => { + when(getConfig) + .calledWith('update') + .thenReturn(({ channel: 'alpha' } as any) as Cfg.Config['update']) + const fakeProvider = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: vi.fn(), + source: () => (({ channel: 'alpha' } as any) as WebUpdateSource), + } + const fakeProvider2 = { + ...fakeProvider, + source: () => (({ channel: 'beta' } as any) as WebUpdateSource), + } + when(getWebProvider) + .calledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + .thenReturn(fakeProvider) + when(getWebProvider) + .calledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'beta', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + .thenReturn(fakeProvider2) + const driverManager = manageDriver(dispatch) + return driverManager + .handleAction({ + type: CONFIG_INITIALIZED, + } as ConfigInitializedAction) + .then(() => { + expect(getWebProvider).toHaveBeenCalledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + expect(driverManager.getUpdateDriver()).not.toBeNull() + when(fakeProvider.teardown).calledWith().thenResolve() + return driverManager.handleAction({ + type: VALUE_UPDATED, + } as ConfigValueUpdatedAction) + }) + .then(() => { + expect(getWebProvider).toHaveBeenCalledOnce() + when(getConfig) + .calledWith('update') + .thenReturn(({ + channel: 'beta', + } as any) as Cfg.Config['update']) + return driverManager.handleAction({ + type: VALUE_UPDATED, + } as ConfigValueUpdatedAction) + }) + .then(() => { + expect(getWebProvider).toHaveBeenCalledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + }) + }) +}) + +describe('update driver', () => { + let dispatch: Dispatch + let testDir: string = '' + let subject: UpdateDriver | null = null + const fakeProvider: UpdateProvider = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: vi.fn(), + source: () => (({ channel: 'alpha' } as any) as WebUpdateSource), + } + const fakeUsbProviders: Record> = { + first: { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: () => '/some/usb/path', + source: () => + (({ + massStorageRootPath: '/some/usb/path', + } as any) as USBUpdateSource), + }, + } + + beforeEach(() => { + const thisTd = tempy.directory() + 
testDir = thisTd + dispatch = vi.fn() + when(getSystemUpdateDir).calledWith().thenReturn(thisTd) + when(getConfig) + .calledWith('update') + .thenReturn(({ channel: 'alpha' } as any) as Cfg.Config['update']) + when(getWebProvider) + .calledWith({ + manifestUrl: FLEX_MANIFEST_URL, + channel: 'alpha', + updateCacheDirectory: testDir, + currentVersion: CURRENT_SYSTEM_VERSION, + }) + .thenReturn(fakeProvider) + fakeUsbProviders.first = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: () => '/some/usb/path', + source: () => + (({ + massStorageRootPath: '/some/usb/path', + } as any) as USBUpdateSource), + } + fakeUsbProviders.second = { + teardown: vi.fn(), + refreshUpdateCache: vi.fn(), + getUpdateDetails: vi.fn(), + lockUpdateCache: vi.fn(), + unlockUpdateCache: vi.fn(), + name: () => '/some/other/usb/path', + source: () => + (({ + massStorageRootPath: '/some/other/usb/path', + } as any) as USBUpdateSource), + } + subject = createUpdateDriver(dispatch) + }) + + afterEach(() => { + vi.resetAllMocks() + const oldTd = testDir + testDir = '' + return ( + subject?.teardown() || new Promise(resolve => resolve()) + ).then(() => + oldTd === '' + ? new Promise(resolve => resolve()) + : rm(oldTd, { recursive: true, force: true }) + ) + }) + + it('checks updates when told to check updates', () => { + const thisSubject = subject as UpdateDriver + when(fakeProvider.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenDo( + progress => + new Promise(resolve => { + progress({ + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + resolve({ + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + }) + ) + return thisSubject + .handleAction({ type: 'shell:CHECK_UPDATE', meta: { shell: true } }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:UPDATE_INFO', + payload: { + version: null, + releaseNotes: null, + force: false, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: null, force: false, target: 'flex' }, + }) + }) + }) + it('forwards in-progress downloads when no USB updates are present', () => { + const thisSubject = subject as UpdateDriver + when(fakeProvider.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenDo( + progress => + new Promise(resolve => { + progress({ + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + progress({ + version: '1.2.3', + files: null, + downloadProgress: 0, + releaseNotes: null, + }) + progress({ + version: '1.2.3', + files: null, + downloadProgress: 50, + releaseNotes: null, + }) + progress({ + version: '1.2.3', + files: { + system: '/some/path', + releaseNotes: '/some/other/path', + }, + downloadProgress: 100, + releaseNotes: 'some release notes', + }) + resolve({ + version: '1.2.3', + files: { + system: '/some/path', + releaseNotes: '/some/other/path', + }, + downloadProgress: 100, + releaseNotes: 'some release notes', + }) + }) + ) + return thisSubject + .handleAction({ type: 'shell:CHECK_UPDATE', meta: { shell: true } }) + .then(() => { + expect(dispatch).toHaveBeenNthCalledWith(1, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '1.2.3', force: false, target: 'flex' }, + }) + expect(dispatch).toHaveBeenNthCalledWith(2, { + type: 'robotUpdate:DOWNLOAD_PROGRESS', + payload: { progress: 50, target: 'flex' }, + }) + 
expect(dispatch).toHaveBeenNthCalledWith(3, { + type: 'robotUpdate:UPDATE_INFO', + payload: { + version: '1.2.3', + releaseNotes: 'some release notes', + force: false, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenNthCalledWith(4, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '1.2.3', force: false, target: 'flex' }, + }) + expect(dispatch).toHaveBeenNthCalledWith(5, { + type: 'robotUpdate:UPDATE_INFO', + payload: { + version: '1.2.3', + releaseNotes: 'some release notes', + force: false, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenNthCalledWith(6, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '1.2.3', force: false, target: 'flex' }, + }) + }) + }) + it('creates a usb provider when it gets a message that a usb device was added', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + }) + }) + it('does not create a usb provider if it already has one for a path', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.first.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledOnce() + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:UPDATE_INFO', + payload: { + releaseNotes: 'some fake notes', + version: '0.1.2', + force: true, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenCalledWith({ + type: 
'robotUpdate:UPDATE_VERSION', + payload: { + version: '0.1.2', + force: true, + target: 'flex', + }, + }) + }) + .then(() => { + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'robotUpdate:READ_SYSTEM_FILE', + payload: { target: 'flex' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:FILE_INFO', + payload: { + systemFile: '/some/file', + version: '0.1.2', + isManualFile: false, + }, + }) + }) + }) + it('tears down a usb provider when it is removed', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + when(fakeUsbProviders.first.teardown).calledWith().thenResolve() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED', + payload: { rootPath: '/some/usb/path' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(fakeUsbProviders.first.teardown).toHaveBeenCalledOnce() + }) + }) + it('re-adds a usb provider if it is inserted after being removed', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + when(fakeUsbProviders.first.teardown).calledWith().thenResolve() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED', + payload: { rootPath: '/some/usb/path' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(fakeUsbProviders.first.teardown).toHaveBeenCalledOnce() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledTimes(2) + }) + }) + it('prefers usb updates to web updates', () => { + const thisSubject = 
subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(fakeUsbProviders.first.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '0.1.2', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeProvider.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { + system: '/some/file/from/the/web', + releaseNotes: null, + }, + releaseNotes: 'some other notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', '/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => + thisSubject.handleAction({ + type: 'shell:CHECK_UPDATE', + meta: { shell: true }, + }) + ) + .then(() => { + expect(dispatch).toHaveBeenLastCalledWith({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { version: '0.1.2', force: true, target: 'flex' }, + }) + }) + .then(() => { + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'robotUpdate:READ_SYSTEM_FILE', + payload: { target: 'flex' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:FILE_INFO', + payload: { + systemFile: '/some/file', + version: '0.1.2', + isManualFile: false, + }, + }) + }) + }) + it('selects the highest version usb update', () => { + const thisSubject = subject as UpdateDriver + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + .thenReturn(fakeUsbProviders.first) + when(getUsbProvider) + .calledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/other/usb/path', + massStorageDeviceFiles: ['/some/third/file', '/some/fourth/file'], + }) + .thenReturn(fakeUsbProviders.second) + when(fakeUsbProviders.first.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.second.refreshUpdateCache) + .calledWith(expect.any(Function)) + .thenResolve({ + version: '0.1.2', + files: { system: '/some/other/file', releaseNotes: null }, + releaseNotes: 'some other fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.first.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '1.2.3', + files: { system: '/some/file', releaseNotes: null }, + releaseNotes: 'some fake notes', + downloadProgress: 100, + }) + when(fakeUsbProviders.second.getUpdateDetails) + .calledWith() + .thenReturn({ + version: '0.1.2', + files: { system: '/some/other/filefile', releaseNotes: null }, + releaseNotes: 'some other fake notes', + downloadProgress: 100, + }) + return thisSubject + .handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/usb/path', + filePaths: ['/some/file', 
'/some/other/file'], + }, + meta: { shell: true }, + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', + payload: { + rootPath: '/some/other/usb/path', + filePaths: ['/some/third/file', '/some/fourth/file'], + }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(getUsbProvider).toHaveBeenCalledWith({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: '/some/usb/path', + massStorageDeviceFiles: ['/some/file', '/some/other/file'], + }) + expect(dispatch).toHaveBeenNthCalledWith(1, { + type: 'robotUpdate:UPDATE_INFO', + payload: { + releaseNotes: 'some fake notes', + version: '1.2.3', + force: true, + target: 'flex', + }, + }) + expect(dispatch).toHaveBeenNthCalledWith(2, { + type: 'robotUpdate:UPDATE_VERSION', + payload: { + version: '1.2.3', + force: true, + target: 'flex', + }, + }) + }) + .then(() => { + vi.mocked(dispatch).mockReset() + return thisSubject.handleAction({ + type: 'robotUpdate:READ_SYSTEM_FILE', + payload: { target: 'flex' }, + meta: { shell: true }, + }) + }) + .then(() => { + expect(dispatch).toHaveBeenCalledWith({ + type: 'robotUpdate:FILE_INFO', + payload: { + systemFile: '/some/file', + version: '1.2.3', + isManualFile: false, + }, + }) + }) + }) +}) diff --git a/app-shell-odd/src/system-update/__tests__/release-files.test.ts b/app-shell-odd/src/system-update/__tests__/release-files.test.ts deleted file mode 100644 index bd2a421b910..00000000000 --- a/app-shell-odd/src/system-update/__tests__/release-files.test.ts +++ /dev/null @@ -1,72 +0,0 @@ -// TODO(mc, 2020-06-11): test all release-files functions -import { vi, describe, it, expect, afterAll } from 'vitest' -import path from 'path' -import { promises as fs } from 'fs' -import fse from 'fs-extra' -import tempy from 'tempy' - -import { cleanupReleaseFiles } from '../release-files' -vi.mock('electron-store') -vi.mock('../../log') - -describe('system release files utilities', () => { - const tempDirs: string[] = [] - const makeEmptyDir = (): string => { - const dir: string = tempy.directory() - tempDirs.push(dir) - return dir - } - - afterAll(async () => { - await Promise.all(tempDirs.map(d => fse.remove(d))) - }) - - describe('cleanupReleaseFiles', () => { - it('should leave current version files alone', () => { - const dir = makeEmptyDir() - const releaseDir = path.join(dir, '4.0.0') - - return fs - .mkdir(releaseDir) - .then(() => cleanupReleaseFiles(dir, '4.0.0')) - .then(() => fs.readdir(dir)) - .then(files => { - expect(files).toEqual(['4.0.0']) - }) - }) - - it('should leave support files alone', () => { - const dir = makeEmptyDir() - const releaseDir = path.join(dir, '4.0.0') - const releaseManifest = path.join(dir, 'releases.json') - - return Promise.all([ - fs.mkdir(releaseDir), - fse.writeJson(releaseManifest, { hello: 'world' }), - ]) - .then(() => cleanupReleaseFiles(dir, '4.0.0')) - .then(() => fs.readdir(dir)) - .then(files => { - expect(files).toEqual(['4.0.0', 'releases.json']) - }) - }) - - it('should delete other directories', () => { - const dir = makeEmptyDir() - const releaseDir = path.join(dir, '4.0.0') - const oldReleaseDir = path.join(dir, '3.9.0') - const olderReleaseDir = path.join(dir, '3.8.0') - - return Promise.all([ - fs.mkdir(releaseDir), - fs.mkdir(oldReleaseDir), - 
fs.mkdir(olderReleaseDir), - ]) - .then(() => cleanupReleaseFiles(dir, '4.0.0')) - .then(() => fs.readdir(dir)) - .then(files => { - expect(files).toEqual(['4.0.0']) - }) - }) - }) -}) diff --git a/app-shell-odd/src/system-update/__tests__/release-manifest.test.ts b/app-shell-odd/src/system-update/__tests__/release-manifest.test.ts deleted file mode 100644 index 89091d2731c..00000000000 --- a/app-shell-odd/src/system-update/__tests__/release-manifest.test.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { describe, it, vi, beforeEach, afterEach, expect } from 'vitest' -import * as Http from '../../http' -import * as Dirs from '../directories' -import { downloadAndCacheReleaseManifest } from '../release-manifest' - -vi.mock('../../http') -vi.mock('../directories') -vi.mock('../../log') -vi.mock('electron-store') -const fetchJson = Http.fetchJson -const getManifestCacheDir = Dirs.getManifestCacheDir - -const MOCK_DIR = 'mock_dir' -const MANIFEST_URL = 'http://example.com/releases.json' -const MOCK_MANIFEST = {} as any - -describe('release manifest utilities', () => { - beforeEach(() => { - vi.mocked(getManifestCacheDir).mockReturnValue(MOCK_DIR) - vi.mocked(fetchJson).mockResolvedValue(MOCK_MANIFEST) - }) - - afterEach(() => { - vi.resetAllMocks() - }) - - it('should download and save the manifest from a url', async () => { - await expect( - downloadAndCacheReleaseManifest(MANIFEST_URL) - ).resolves.toEqual(MOCK_MANIFEST) - expect(fetchJson).toHaveBeenCalledWith(MANIFEST_URL) - }) - - it('should pull the manifest from the file if the manifest download fails', async () => { - const error = new Error('Failed to download') - vi.mocked(fetchJson).mockRejectedValue(error) - await expect( - downloadAndCacheReleaseManifest(MANIFEST_URL) - ).resolves.toEqual(MOCK_MANIFEST) - expect(fetchJson).toHaveBeenCalledWith(MANIFEST_URL) - }) -}) diff --git a/app-shell-odd/src/system-update/constants.ts b/app-shell-odd/src/system-update/constants.ts new file mode 100644 index 00000000000..575b64230b5 --- /dev/null +++ b/app-shell-odd/src/system-update/constants.ts @@ -0,0 +1,11 @@ +const OPENTRONS_PROJECT: string = _OPENTRONS_PROJECT_ + +export const FLEX_MANIFEST_URL = + OPENTRONS_PROJECT && OPENTRONS_PROJECT.includes('robot-stack') + ? 
'https://builds.opentrons.com/ot3-oe/releases.json' + : 'https://ot3-development.builds.opentrons.com/ot3-oe/releases.json' + +export const SYSTEM_UPDATE_DIRECTORY = '__ot_system_update__' +export const VERSION_FILENAME = 'VERSION.json' +export const REASONABLE_VERSION_FILE_SIZE_B = 4096 +export const SYSTEM_FILENAME = 'system-update.zip' diff --git a/app-shell-odd/src/system-update/directories.ts b/app-shell-odd/src/system-update/directories.ts index c2723153505..757f47bc44a 100644 --- a/app-shell-odd/src/system-update/directories.ts +++ b/app-shell-odd/src/system-update/directories.ts @@ -1,15 +1,6 @@ import { app } from 'electron' import path from 'path' +import { SYSTEM_UPDATE_DIRECTORY } from './constants' -const SYSTEM_UPDATE_DIRECTORY = path.join( - app.getPath('sessionData'), - '__ot_system_update__' -) - -export const getSystemUpdateDir = (): string => SYSTEM_UPDATE_DIRECTORY - -export const getFileDownloadDir = (version: string): string => - path.join(SYSTEM_UPDATE_DIRECTORY, version) - -export const getManifestCacheDir = (): string => - path.join(SYSTEM_UPDATE_DIRECTORY, 'releases.json') +export const getSystemUpdateDir = (): string => + path.join(app.getPath('userData'), SYSTEM_UPDATE_DIRECTORY) diff --git a/app-shell-odd/src/system-update/from-usb/__tests__/provider.test.ts b/app-shell-odd/src/system-update/from-usb/__tests__/provider.test.ts new file mode 100644 index 00000000000..cbdf79435dc --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/__tests__/provider.test.ts @@ -0,0 +1,205 @@ +import { it, describe, vi, afterEach, expect } from 'vitest' +import { when } from 'vitest-when' +import { getProvider } from '../provider' +import { getLatestMassStorageUpdateFile as _getLatestMassStorageUpdateFile } from '../scan-device' + +vi.mock('../scan-device') +vi.mock('../../../log') + +const getLatestMassStorageUpdateFile = vi.mocked( + _getLatestMassStorageUpdateFile +) + +describe('system-update/from-usb/provider', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('signals available updates when given available updates', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.2.3' }) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: '1.2.3', + files: { + system: '/storage/valid-release.zip', + releaseNotes: expect.any(String), + }, + releaseNotes: expect.any(String), + downloadProgress: 100, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('signals no available update when given no available updates', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/blahblah']) + .thenResolve(null) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/blahblah'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('signals no available update when the scan throws', () => { + 
when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/blahblah']) + .thenReject(new Error('oh no')) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/blahblah'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('signals no available update when the highest version update is the same version as current', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.0.0' }) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => { + expect(progress).toHaveBeenLastCalledWith(expectedUpdate) + }) + }) + it('throws when torn down before scanning', () => { + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/', + massStorageDeviceFiles: [], + }) + const progress = vi.fn() + return provider + .teardown() + .then(() => + expect(provider.refreshUpdateCache(progress)).rejects.toThrow() + ) + .then(() => + expect(progress).toHaveBeenLastCalledWith({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + ) + }) + it('throws when torn down right after scanning', () => { + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/', + massStorageDeviceFiles: [], + }) + const progress = vi.fn() + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenDo(() => + provider.teardown().then(() => ({ + path: '/storage/valid-release.zip', + version: '1.0.0', + })) + ) + return provider + .teardown() + .then(() => + expect(provider.refreshUpdateCache(progress)).rejects.toThrow() + ) + .then(() => + expect(progress).toHaveBeenLastCalledWith({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + ) + }) + it('will not run two checks at once', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.0.0' }) + const progress = vi.fn() + const provider = getProvider({ + currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + const first = provider.refreshUpdateCache(progress) + const second = provider.refreshUpdateCache(progress) + return Promise.all([ + expect(first).resolves.toEqual(expectedUpdate), + expect(second).rejects.toThrow(), + ]).then(() => expect(getLatestMassStorageUpdateFile).toHaveBeenCalledOnce()) + }) + it('will run a second check after the first ends', () => { + when(getLatestMassStorageUpdateFile) + .calledWith(['/storage/valid-release.zip']) + .thenResolve({ path: '/storage/valid-release.zip', version: '1.0.0' }) + const progress = vi.fn() + const provider = getProvider({ + 
currentVersion: '1.0.0', + massStorageDeviceRoot: '/storage', + massStorageDeviceFiles: ['/storage/valid-release.zip'], + }) + const expectedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + return expect(provider.refreshUpdateCache(progress)) + .resolves.toEqual(expectedUpdate) + .then(() => + expect(provider.refreshUpdateCache(progress)).resolves.toEqual( + expectedUpdate + ) + ) + }) +}) diff --git a/app-shell-odd/src/system-update/from-usb/__tests__/scan-device.test.ts b/app-shell-odd/src/system-update/from-usb/__tests__/scan-device.test.ts new file mode 100644 index 00000000000..ff51e89abf3 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/__tests__/scan-device.test.ts @@ -0,0 +1,59 @@ +import { describe, it, expect, vi, afterEach } from 'vitest' +import { when } from 'vitest-when' + +import { getVersionFromZipIfValid as _getVersionFromZipIfValid } from '../scan-zip' +import { getLatestMassStorageUpdateFile } from '../scan-device' +vi.mock('../../../log') +vi.mock('../scan-zip') +const getVersionFromZipIfValid = vi.mocked(_getVersionFromZipIfValid) + +describe('system-update/from-usb/scan-device', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('returns the single file passed in', () => { + when(getVersionFromZipIfValid) + .calledWith('/some/random/zip/file.zip') + .thenResolve({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + return expect( + getLatestMassStorageUpdateFile(['/some/random/zip/file.zip']) + ).resolves.toEqual({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + }) + it('returns null if no files are passed in', () => + expect(getLatestMassStorageUpdateFile([])).resolves.toBeNull()) + it('returns null if no suitable zips are found', () => { + when(getVersionFromZipIfValid) + .calledWith('/some/random/zip/file.zip') + .thenReject(new Error('no version found')) + return expect( + getLatestMassStorageUpdateFile(['/some/random/zip/file.zip']) + ).resolves.toBeNull() + }) + it('checks only the zip file', () => { + when(getVersionFromZipIfValid) + .calledWith('/some/random/zip/file.zip') + .thenResolve({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + return expect( + getLatestMassStorageUpdateFile([ + '/some/random/zip/file.zip', + '/some/other/random/file', + ]) + ) + .resolves.toEqual({ path: '/some/random/zip/file.zip', version: '0.0.1' }) + .then(() => expect(getVersionFromZipIfValid).toHaveBeenCalledOnce()) + }) + it('returns the highest version', () => { + when(getVersionFromZipIfValid) + .calledWith('higher-version.zip') + .thenResolve({ path: 'higher-version.zip', version: '1.0.0' }) + when(getVersionFromZipIfValid) + .calledWith('lower-version.zip') + .thenResolve({ path: 'higher-version.zip', version: '1.0.0-alpha.0' }) + return expect( + getLatestMassStorageUpdateFile([ + 'higher-version.zip', + 'lower-version.zip', + ]) + ).resolves.toEqual({ path: 'higher-version.zip', version: '1.0.0' }) + }) +}) diff --git a/app-shell-odd/src/system-update/from-usb/__tests__/scan-zip.test.ts b/app-shell-odd/src/system-update/from-usb/__tests__/scan-zip.test.ts new file mode 100644 index 00000000000..226267a5a11 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/__tests__/scan-zip.test.ts @@ -0,0 +1,151 @@ +import { it, describe, expect, vi } from 'vitest' +import path from 'path' +import { exec as _exec } from 'child_process' +import { promisify } from 'util' +import { writeFile, mkdir } from 'fs/promises' +import { REASONABLE_VERSION_FILE_SIZE_B } from '../../constants' +import 
{ directoryWithCleanup } from '../../utils' +import { getVersionFromZipIfValid } from '../scan-zip' + +vi.mock('../../../log') +const exec = promisify(_exec) + +const zipCommand = ( + tempDir: string, + zipName?: string, + zipContentSubDirectory?: string +): string => + `zip -j ${path.join(tempDir, zipName ?? 'test.zip')} ${path.join( + tempDir, + zipContentSubDirectory ?? 'test', + '*' + )}` + +describe('system-update/from-usb/scan-zip', () => { + it('should read version data from a valid zip file', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + JSON.stringify({ + robot_type: 'OT-3 Standard', + opentrons_api_version: '1.2.3', + }) + ) + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).resolves.toEqual({ + path: path.join(directory, 'test.zip'), + version: '1.2.3', + }) + ) + )) + + it('should throw if there is no version file', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => writeFile(path.join(directory, 'test', 'dummy'), 'lalala')) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if the version file is too big', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + `{data: "${'a'.repeat(REASONABLE_VERSION_FILE_SIZE_B + 1)}"}` + ) + ) + .then(() => + exec( + `head -c ${ + REASONABLE_VERSION_FILE_SIZE_B + 1 + } /dev/zero > ${path.join(directory, 'test', 'VERSION.json')} ` + ) + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if the version file is not valid json', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile(path.join(directory, 'test', 'VERSION.json'), 'asdaasdas') + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if the version file is for OT-2', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + JSON.stringify({ + robot_type: 'OT-2 Standard', + opentrons_api_version: '1.2.3', + }) + ) + ) + .then(() => exec(zipCommand(directory))) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if not given a zip file', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => writeFile(path.join(directory, 'test.zip'), 'aosidasdasd')) + .then(() => + expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) + it('should throw if given a zip file with internal directories', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'test')) + .then(() => + writeFile( + path.join(directory, 'test', 'VERSION.json'), + JSON.stringify({ + robot_type: 'OT-3 Standard', + opentrons_api_version: '1.2.3', + }) + ) + ) + .then(() => + exec( + `zip ${path.join(directory, 'test.zip')} ${path.join( + directory, + 'test', + '*' + )}` + ) + ) + .then(() => 
+ expect( + getVersionFromZipIfValid(path.join(directory, 'test.zip')) + ).rejects.toThrow() + ) + )) +}) diff --git a/app-shell-odd/src/system-update/from-usb/index.ts b/app-shell-odd/src/system-update/from-usb/index.ts new file mode 100644 index 00000000000..9ae1d7e4751 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/index.ts @@ -0,0 +1,2 @@ +export { getProvider } from './provider' +export type { USBUpdateSource } from './provider' diff --git a/app-shell-odd/src/system-update/from-usb/provider.ts b/app-shell-odd/src/system-update/from-usb/provider.ts new file mode 100644 index 00000000000..53913fab790 --- /dev/null +++ b/app-shell-odd/src/system-update/from-usb/provider.ts @@ -0,0 +1,111 @@ +import tempy from 'tempy' +import path from 'path' +import { rm, writeFile } from 'fs/promises' +import type { UpdateProvider, ResolvedUpdate, ProgressCallback } from '../types' +import { getLatestMassStorageUpdateFile } from './scan-device' +import { createLogger } from '../../log' + +export interface USBUpdateSource { + currentVersion: string + massStorageDeviceRoot: string + massStorageDeviceFiles: string[] +} + +const fakeReleaseNotesForMassStorage = (version: string): string => ` +# Opentrons Robot Software Version ${version} + +This update is from a USB mass storage device connected to your Flex, and release notes cannot be shown. + +Don't remove the USB mass storage device while the update is in progress. +` +const log = createLogger('system-updates/from-usb') + +export function getProvider( + from: USBUpdateSource +): UpdateProvider { + const noUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } as const + let currentUpdate: ResolvedUpdate = noUpdate + let canceller = new AbortController() + let currentCheck: Promise | null = null + const tempdir = tempy.directory() + let tornDown = false + + const checkUpdates = async ( + progress: ProgressCallback + ): Promise => { + const myCanceller = canceller + if (myCanceller.signal.aborted || tornDown) { + progress(noUpdate) + throw new Error('cache torn down') + } + const updateFile = await getLatestMassStorageUpdateFile( + from.massStorageDeviceFiles + ).catch(() => null) + if (myCanceller.signal.aborted) { + progress(noUpdate) + throw new Error('cache torn down') + } + if (updateFile == null) { + log.info(`No update file in presented files`) + progress(noUpdate) + currentUpdate = noUpdate + return noUpdate + } + log.info(`Update file found for version ${updateFile.version}`) + if (updateFile.version === from.currentVersion) { + progress(noUpdate) + currentUpdate = noUpdate + return noUpdate + } + await writeFile( + path.join(tempdir, 'dummy-release-notes.md'), + fakeReleaseNotesForMassStorage(updateFile.version) + ) + if (myCanceller.signal.aborted) { + progress(noUpdate) + throw new Error('cache torn down') + } + const update = { + version: updateFile.version, + files: { + system: updateFile.path, + releaseNotes: path.join(tempdir, 'dummy-release-notes.md'), + }, + releaseNotes: fakeReleaseNotesForMassStorage(updateFile.version), + downloadProgress: 100, + } as const + currentUpdate = update + progress(update) + return update + } + return { + refreshUpdateCache: progressCallback => { + if (currentCheck != null) { + return new Promise((resolve, reject) => { + reject(new Error('Check already ongoing')) + }) + } + const updatePromise = checkUpdates(progressCallback) + currentCheck = updatePromise + return updatePromise.finally(() => { + currentCheck = null + }) + }, + getUpdateDetails: () => 
currentUpdate,
+    lockUpdateCache: () => {},
+    unlockUpdateCache: () => {},
+    teardown: () => {
+      canceller.abort()
+      tornDown = true
+      canceller = new AbortController()
+      return rm(tempdir, { recursive: true, force: true })
+    },
+    name: () => `USBUpdateProvider from ${from.massStorageDeviceRoot}`,
+    source: () => from,
+  }
+}
diff --git a/app-shell-odd/src/system-update/from-usb/scan-device.ts b/app-shell-odd/src/system-update/from-usb/scan-device.ts
new file mode 100644
index 00000000000..0c0e7f3e40c
--- /dev/null
+++ b/app-shell-odd/src/system-update/from-usb/scan-device.ts
@@ -0,0 +1,37 @@
+import Semver from 'semver'
+import { getVersionFromZipIfValid } from './scan-zip'
+import type { FileDetails } from './scan-zip'
+
+import { createLogger } from '../../log'
+const log = createLogger('system-update/from-usb/scan-device')
+
+const higherVersion = (a: FileDetails | null, b: FileDetails): FileDetails =>
+  a == null ? b : Semver.gt(a.version, b.version) ? a : b
+
+const mostRecentUpdateOf = (candidates: FileDetails[]): FileDetails | null =>
+  candidates.reduce<FileDetails | null>(
+    (prev, current) => higherVersion(prev, current),
+    null
+  )
+
+const getMassStorageUpdateFiles = (
+  filePaths: string[]
+): Promise<FileDetails[]> =>
+  Promise.all(
+    filePaths.map(path =>
+      path.endsWith('.zip')
+        ? getVersionFromZipIfValid(path).catch(() => null)
+        : new Promise(resolve => {
+            resolve(null)
+          })
+    )
+  ).then(values => {
+    const filtered = values.filter(entry => entry != null) as FileDetails[]
+    log.debug(`scan device found ${filtered}`)
+    return filtered
+  })
+
+export const getLatestMassStorageUpdateFile = (
+  filePaths: string[]
+): Promise<FileDetails | null> =>
+  getMassStorageUpdateFiles(filePaths).then(mostRecentUpdateOf)
diff --git a/app-shell-odd/src/system-update/from-usb/scan-zip.ts b/app-shell-odd/src/system-update/from-usb/scan-zip.ts
new file mode 100644
index 00000000000..b6bce376096
--- /dev/null
+++ b/app-shell-odd/src/system-update/from-usb/scan-zip.ts
@@ -0,0 +1,88 @@
+import StreamZip from 'node-stream-zip'
+import Semver from 'semver'
+import { createLogger } from '../../log'
+import { REASONABLE_VERSION_FILE_SIZE_B, VERSION_FILENAME } from '../constants'
+
+const log = createLogger('system-update/from-usb/scan-zip')
+
+export interface FileDetails {
+  path: string
+  version: string
+}
+
+export const getVersionFromZipIfValid = (path: string): Promise<FileDetails> =>
+  new Promise((resolve, reject) => {
+    const zip = new StreamZip({ file: path, storeEntries: true })
+    zip.on('ready', () => {
+      log.info(`Reading zip from ${path}`)
+      getVersionFromOpenedZipIfValid(zip)
+        .then(version => {
+          log.info(`Zip at ${path} has version ${version}`)
+          zip.close()
+          resolve({ version, path })
+        })
+        .catch(err => {
+          log.info(
+            `Zip at ${path} was read but could not be parsed: ${err.name}: ${err.message}`
+          )
+          zip.close()
+          reject(err)
+        })
+    })
+    zip.on('error', err => {
+      log.info(`Zip at ${path} could not be read: ${err.name}: ${err.message}`)
+      zip.close()
+      reject(err)
+    })
+  })
+
+export const getVersionFromOpenedZipIfValid = (
+  zip: StreamZip
+): Promise<string> =>
+  new Promise((resolve, reject) => {
+    const found = Object.values(zip.entries()).reduce((prev, entry) => {
+      log.debug(
+        `Checking if ${entry.name} is ${VERSION_FILENAME}, is a file (${entry.isFile}), and ${entry.size}<${REASONABLE_VERSION_FILE_SIZE_B}`
+      )
+      if (
+        entry.isFile &&
+        entry.name === VERSION_FILENAME &&
+        entry.size < REASONABLE_VERSION_FILE_SIZE_B
+      ) {
+        log.debug(`${entry.name} is a version file candidate`)
+        const contents = zip.entryDataSync(entry.name).toString('ascii')
+        log.debug(`version contents: ${contents}`)
+        try {
+          const parsedContents = JSON.parse(contents)
+          if (parsedContents?.robot_type !== 'OT-3 Standard') {
+            reject(new Error('not a Flex release file'))
+          }
+          const fileVersion = parsedContents?.opentrons_api_version
+          const version = Semver.valid(fileVersion as string)
+          if (version === null) {
+            reject(new Error(`${fileVersion} is not a valid version`))
+            return prev
+          } else {
+            log.info(`Found version file version ${version}`)
+            resolve(version)
+            return true
+          }
+        } catch (err: any) {
+          if (err instanceof Error) {
+            log.error(
+              `Failed to read ${entry.name}: ${err.name}: ${err.message}`
+            )
+          } else {
+            log.error(`Failed to read ${entry.name}: ${err}`)
+          }
+          reject(err)
+          return prev
+        }
+      } else {
+        return prev
+      }
+    }, false)
+    if (!found) {
+      reject(new Error('No version file found in zip'))
+    }
+  })
diff --git a/app-shell-odd/src/system-update/from-web/__tests__/latest-update.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/latest-update.test.ts
new file mode 100644
index 00000000000..b07d6947861
--- /dev/null
+++ b/app-shell-odd/src/system-update/from-web/__tests__/latest-update.test.ts
@@ -0,0 +1,40 @@
+import { describe, it, expect } from 'vitest'
+import { latestVersionForChannel } from '../latest-update'
+
+describe('latest-update', () => {
+  it.each([
+    ['8.0.0', '7.0.0', '8.0.0', ''],
+    ['7.0.0', '8.0.0', '8.0.0', ''],
+    ['8.10.0', '8.9.0', '8.10.0', ''],
+    ['8.9.0', '8.10.0', '8.10.0', ''],
+    ['8.0.0-alpha.0', '8.0.0-alpha.1', '8.0.0-alpha.1', 'alpha'],
+    ['8.0.0-alpha.1', '8.0.0-alpha.0', '8.0.0-alpha.1', 'alpha'],
+    ['8.1.0-alpha.0', '8.0.0-alpha.1', '8.1.0-alpha.0', 'alpha'],
+    ['8.0.0-alpha.1', '8.1.0-alpha.0', '8.1.0-alpha.0', 'alpha'],
+  ])(
+    'choosing between %s and %s should result in %s',
+    (first, second, higher, channel) => {
+      expect(latestVersionForChannel([first, second], channel)).toEqual(higher)
+    }
+  )
+  it('ignores updates from different channels', () => {
+    expect(
+      latestVersionForChannel(
+        ['8.0.0', '9.0.0-alpha.0', '10.0.0-beta.1', '2.0.0'],
+        'production'
+      )
+    ).toEqual('8.0.0')
+    expect(
+      latestVersionForChannel(
+        ['8.0.0', '9.0.0-alpha.0', '10.0.0-beta.1', '2.0.0'],
+        'alpha'
+      )
+    ).toEqual('9.0.0-alpha.0')
+    expect(
+      latestVersionForChannel(
+        ['8.0.0', '9.0.0-alpha.0', '10.0.0-beta.1', '2.0.0'],
+        'beta'
+      )
+    ).toEqual('10.0.0-beta.1')
+  })
+})
diff --git a/app-shell-odd/src/system-update/from-web/__tests__/provider.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/provider.test.ts
new file mode 100644
index 00000000000..3ffe2e4ec08
--- /dev/null
+++ b/app-shell-odd/src/system-update/from-web/__tests__/provider.test.ts
@@ -0,0 +1,774 @@
+import { vi, describe, it, expect, afterEach } from 'vitest'
+import { when } from 'vitest-when'
+
+import { LocalAbortError } from '../../../http'
+import { getProvider } from '../provider'
+import { getOrDownloadManifest as _getOrDownloadManifest } from '../release-manifest'
+import { cleanUpAndGetOrDownloadReleaseFiles as _cleanUpAndGetOrDownloadReleaseFiles } from '../release-files'
+
+vi.mock('../../../log')
+vi.mock('../release-manifest', async importOriginal => {
+  // eslint-disable-next-line @typescript-eslint/consistent-type-imports
+  const original = await importOriginal<typeof import('../release-manifest')>()
+  return {
+    ...original,
+    getOrDownloadManifest: vi.fn(),
+  }
+})
+vi.mock('../release-files')
+
+const getOrDownloadManifest = vi.mocked(_getOrDownloadManifest)
+const
cleanUpAndGetOrDownloadReleaseFiles = vi.mocked( + _cleanUpAndGetOrDownloadReleaseFiles +) + +describe('provider.refreshUpdateCache happy paths', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('says there is no update if the latest version is the current version', () => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + .then(() => { + expect(progressCallback).toHaveBeenCalledWith({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + expect(cleanUpAndGetOrDownloadReleaseFiles).not.toHaveBeenCalled() + }) + }) + it('says there is an update if a cached update is needed', () => { + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'oh look some release notes cool', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes cool', + downloadProgress: 100, + }) + .then(() => + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes cool', + downloadProgress: 100, + }) + ) + }) + it('says there is an update and forwards progress if an update download is needed', () => { + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + 
version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'oh look some release notes sweet', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenDo( + ( + _releaseUrls, + _cacheDir, + _version, + progressCallback, + _abortController + ) => + new Promise(resolve => { + progressCallback({ size: 100, downloaded: 0 }) + resolve() + }) + .then( + () => + new Promise(resolve => { + progressCallback({ size: 100, downloaded: 50 }) + resolve() + }) + ) + .then( + () => + new Promise(resolve => { + progressCallback({ size: 100, downloaded: 100 }) + resolve(releaseData) + }) + ) + ) + + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + expect(provider.getUpdateDetails()).toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes sweet', + downloadProgress: 100, + }) + .then(() => { + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: null, + releaseNotes: null, + downloadProgress: 50, + }) + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: null, + releaseNotes: null, + downloadProgress: 100, + }) + expect(progressCallback).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes sweet', + downloadProgress: 100, + }) + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh look some release notes sweet', + downloadProgress: 100, + }) + }) + }) +}) + +describe('provider.refreshUpdateCache locking', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('will not start a refresh when locked', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + provider.lockUpdateCache() + return expect(provider.refreshUpdateCache(vi.fn())).rejects.toThrow() + }) + it('will start a refresh when locked then unlocked', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 
'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + provider.lockUpdateCache() + provider.unlockUpdateCache() + return expect(provider.refreshUpdateCache(vi.fn())).resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + }) + it('will abort when locked in the manifest phase and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { ...releaseFiles, releaseNotesContent: 'oh hello' } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh hello', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenDo( + (_manifestUrl, _cacheDirectory, abortController) => + new Promise((resolve, reject) => { + abortController.signal.addEventListener( + 'abort', + () => { + reject(new LocalAbortError(abortController.signal.reason)) + }, + { once: true } + ) + provider.lockUpdateCache() + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh hello', + downloadProgress: 100, + }) + ) + }) + .then(() => + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'oh hello', + downloadProgress: 100, + }) + ) + }) + it('will abort when locked between manifest and download phases and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + 
const releaseData = { ...releaseFiles, releaseNotesContent: 'hi' } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'hi', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + expect.any(String), + expect.any(String), + expect.any(AbortController) + ) + .thenDo( + () => + new Promise(resolve => { + provider.lockUpdateCache() + resolve({ production: { '1.2.3': releaseUrls } }) + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'hi', + downloadProgress: 100, + }) + ) + }) + .then(() => + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'hi', + downloadProgress: 100, + }) + ) + }) + it('will abort when locked in the file download phase and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'content', + } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'content', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + expect.any(Object), + expect.any(String), + expect.any(String), + expect.any(Function), + expect.any(AbortController) + ) + .thenDo( + ( + _releaseUrls, + _cacheDirectory, + _version, + _progress, + abortController + ) => + new Promise((resolve, reject) => { + abortController.signal.addEventListener( + 'abort', + () => { + reject(new LocalAbortError(abortController.signal.reason)) + }, + { once: true } + ) + provider.lockUpdateCache() + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'content', + downloadProgress: 100, + }) + ) + }) + .then(() => { + 
expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'content', + downloadProgress: 100, + }) + }) + }) + it('will abort when locked in the last-chance phase and return the previous update', () => { + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.0.0', + }) + const releaseUrls = { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + const releaseFiles = { + system: '/some/random/directory/cached-release-1.2.3/ot3-system.zip', + releaseNotes: + '/some/random/directory/cached-release-1.2.3/releaseNotes.md', + } + const releaseData = { + ...releaseFiles, + releaseNotesContent: 'there is some', + } + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + releaseUrls, + '/some/random/directory/versions', + '1.2.3', + expect.any(Function), + expect.any(Object) + ) + .thenResolve(releaseData) + + return expect(provider.refreshUpdateCache(vi.fn())) + .resolves.toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'there is some', + downloadProgress: 100, + }) + .then(() => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': releaseUrls, + }, + }) + when(cleanUpAndGetOrDownloadReleaseFiles) + .calledWith( + expect.any(Object), + expect.any(String), + expect.any(String), + expect.any(Function), + expect.any(AbortController) + ) + .thenDo( + ( + _releaseUrls, + _cacheDirectory, + _version, + _progress, + _abortController + ) => + new Promise(resolve => { + provider.lockUpdateCache() + resolve(releaseData) + }) + ) + const progress = vi.fn() + return expect(provider.refreshUpdateCache(progress)) + .rejects.toThrow() + .then(() => + expect(progress).toHaveBeenCalledWith({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'there is some', + downloadProgress: 100, + }) + ) + }) + .then(() => + expect(provider.getUpdateDetails()).toEqual({ + version: '1.2.3', + files: releaseFiles, + releaseNotes: 'there is some', + downloadProgress: 100, + }) + ) + }) + it('will not run two checks at once', () => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + const first = provider.refreshUpdateCache(progressCallback) + const second = provider.refreshUpdateCache(progressCallback) + return Promise.all([ + expect(first).resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }), + 
expect(second).rejects.toThrow(), + ]).then(() => expect(getOrDownloadManifest).toHaveBeenCalledOnce()) + }) + it('will run a second check after the first completes', () => { + when(getOrDownloadManifest) + .calledWith( + 'http://opentrons.com/releases.json', + '/some/random/directory', + expect.any(AbortController) + ) + .thenResolve({ + production: { + '1.2.3': { + system: 'http://opentrons.com/system.zip', + fullImage: 'http://opentrons.com/fullImage.zip', + version: 'http://opentrons.com/version.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + }, + }) + const progressCallback = vi.fn() + const provider = getProvider({ + manifestUrl: 'http://opentrons.com/releases.json', + channel: 'release', + updateCacheDirectory: '/some/random/directory', + currentVersion: '1.2.3', + }) + return expect(provider.refreshUpdateCache(progressCallback)) + .resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + .then(() => + expect(provider.refreshUpdateCache(progressCallback)).resolves.toEqual({ + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + }) + ) + }) +}) diff --git a/app-shell-odd/src/system-update/from-web/__tests__/release-files.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/release-files.test.ts new file mode 100644 index 00000000000..34df59eaf49 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/__tests__/release-files.test.ts @@ -0,0 +1,514 @@ +// TODO(mc, 2020-06-11): test all release-files functions +import { vi, describe, it, expect, afterEach } from 'vitest' +import { when } from 'vitest-when' +import path from 'path' +import { promises as fs } from 'fs' + +import { fetchToFile as httpFetchToFile } from '../../../http' +import { + ensureCleanReleaseCacheForVersion, + getReleaseFiles, + downloadReleaseFiles, + getOrDownloadReleaseFiles, +} from '../release-files' + +import { directoryWithCleanup } from '../../utils' +import type { ReleaseSetUrls } from '../../types' + +vi.mock('../../../http') +vi.mock('../../../log') + +const fetchToFile = vi.mocked(httpFetchToFile) + +describe('ensureCleanReleaseCacheForVersion', () => { + it('should create the appropriate directory tree if it does not exist', () => + directoryWithCleanup(directory => + ensureCleanReleaseCacheForVersion( + path.join(directory, 'somerandomdirectory', 'someotherrandomdirectory'), + '1.2.3' + ) + .then(cacheDirectory => { + expect(cacheDirectory).toEqual( + path.join( + directory, + 'somerandomdirectory', + 'someotherrandomdirectory', + 'cached-release-1.2.3' + ) + ) + return fs.stat(cacheDirectory) + }) + .then(stats => expect(stats.isDirectory()).toBeTruthy()) + )) + it('should create the appropriate directory if the base directory entry is occupied by a file', () => + directoryWithCleanup(directory => + fs + .writeFile( + path.join(directory, 'somerandomdirectory'), + 'somerandomdata' + ) + .then(() => + ensureCleanReleaseCacheForVersion( + path.join(directory, 'somerandomdirectory'), + '1.2.3' + ) + ) + .then(cacheDirectory => { + expect(cacheDirectory).toEqual( + path.join(directory, 'somerandomdirectory', 'cached-release-1.2.3') + ) + return fs.stat(cacheDirectory) + }) + .then(stats => expect(stats.isDirectory()).toBeTruthy()) + )) + it('should create the appropriate directory if the version directory entry is occupied by a file', () => + directoryWithCleanup(directory => + fs + .mkdir(path.join(directory, 'somerandomdirectory')) + .then(() => + fs.writeFile( + path.join(directory, 
'somerandomdirectory', 'cached-release-1.2.3'), + 'somerandomdata' + ) + ) + .then(() => + ensureCleanReleaseCacheForVersion( + path.join(directory, 'somerandomdirectory'), + '1.2.3' + ) + ) + .then(baseDirectory => { + expect(baseDirectory).toEqual( + path.join(directory, 'somerandomdirectory', 'cached-release-1.2.3') + ) + return fs.stat(baseDirectory) + }) + .then(stats => expect(stats.isDirectory()).toBeTruthy()) + )) + it('should remove caches for other versions from the cache directory', () => + directoryWithCleanup(directory => + fs + .mkdir(path.join(directory, 'cached-release-0.1.2')) + .then(() => fs.mkdir(path.join(directory, 'cached-release-4.5.6'))) + .then(() => + fs.writeFile( + path.join(directory, 'cached-release-4.5.6', 'test.zip'), + 'asfjohasda' + ) + ) + .then(() => ensureCleanReleaseCacheForVersion(directory, '1.2.3')) + .then(cacheDirectory => { + expect(cacheDirectory).toEqual( + path.join(directory, 'cached-release-1.2.3') + ) + return fs.readdir(directory) + }) + .then(contents => expect(contents).toEqual(['cached-release-1.2.3'])) + )) + it('should leave already-existing correct version cache directories untouched', () => + directoryWithCleanup(directory => + fs + .mkdir(path.join(directory, 'cached-release-1.2.3')) + .then(() => + fs.writeFile( + path.join(directory, 'cached-release-1.2.3', 'system.zip'), + '123123' + ) + ) + .then(() => ensureCleanReleaseCacheForVersion(directory, '1.2.3')) + .then(cacheDirectory => fs.readdir(cacheDirectory)) + .then(contents => { + expect(contents).toEqual(['system.zip']) + return fs.readFile( + path.join(directory, 'cached-release-1.2.3', 'system.zip'), + { encoding: 'utf-8' } + ) + }) + .then(contents => expect(contents).toEqual('123123')) + )) +}) + +describe('getReleaseFiles', () => { + it('should fail if no release files are cached', () => + directoryWithCleanup(directory => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).rejects.toThrow() + )) + it('should fail if system is not present but all others are', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'fullImage.zip'), 'aslkdjasd') + .then(() => fs.writeFile(path.join(directory, 'VERSION.json'), 'asdas')) + .then(() => + fs.writeFile(path.join(directory, 'releaseNotes.md'), 'asdalsda') + ) + .then(() => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).rejects.toThrow() + ) + )) + it('should return available files if system.zip is one of them', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'ot3-system.zip'), 'asdjlhasd') + .then(() => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + ) + )) + it('should find release notes if available', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'ot3-system.zip'), 'asdjlhasd') + 
.then(() => + fs.writeFile(path.join(directory, 'releaseNotes.md'), 'asdasda') + ) + .then(() => + expect( + getReleaseFiles( + { + fullImage: 'http://opentrons.com/fullImage.zip', + system: 'http://opentrons.com/ot3-system.zip', + version: 'http//opentrons.com/VERSION.json', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + }, + directory + ) + ).resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: path.join(directory, 'releaseNotes.md'), + releaseNotesContent: 'asdasda', + }) + ) + )) +}) + +describe('downloadReleaseFiles', () => { + afterEach(() => { + vi.resetAllMocks() + }) + it('should try and fetch both system zip and release notes', () => + directoryWithCleanup(directory => { + let tempSystemPath = '' + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + tempSystemPath = dest + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + when(fetchToFile) + .calledWith( + 'http://opentrons.com/releaseNotes.md', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest) => { + return fs + .writeFile(dest, 'this is the contents of the release notes') + .then(() => dest) + }) + const progress = vi.fn() + return downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ).then(files => { + expect(files).toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: path.join(directory, 'releaseNotes.md'), + releaseNotesContent: 'this is the contents of the release notes', + }) + return Promise.all([ + fs + .readFile(files.system, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ), + fs + .readFile(files.releaseNotes as string, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual( + 'this is the contents of the release notes' + ) + ), + expect(fs.stat(path.dirname(tempSystemPath))).rejects.toThrow(), + ]) + }) + })) + it('should fetch only system zip if only system is available', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + const progress = vi.fn() + return downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ).then(files => { + expect(files).toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + return fs + .readFile(files.system, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ) + }) + })) + it('should tolerate failing to fetch release notes', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + when(fetchToFile) + .calledWith( + 'http://opentrons.com/releaseNotes.md', + expect.any(String), + expect.any(Object) + ) + .thenReject(new 
Error('oh no!')) + const progress = vi.fn() + return downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ).then(files => { + expect(files).toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + return fs + .readFile(files.system, { encoding: 'utf-8' }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ) + }) + })) + it('should fail if it cannot fetch system zip', () => + directoryWithCleanup(directory => { + let tempSystemPath = '' + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenReject(new Error('oh no')) + when(fetchToFile) + .calledWith( + 'http://opentrons.com/releaseNotes.md', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest) => { + tempSystemPath = dest + return fs + .writeFile(dest, 'this is the contents of the release notes') + .then(() => dest) + }) + const progress = vi.fn() + return expect( + downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + progress, + new AbortController() + ) + ) + .rejects.toThrow() + .then(() => + expect(fs.stat(path.dirname(tempSystemPath))).rejects.toThrow() + ) + })) + it('should allow the http requests to be aborted', () => + directoryWithCleanup(directory => { + const aborter = new AbortController() + const progressCallback = vi.fn() + when(fetchToFile) + .calledWith('http://opentrons.com/ot3-system.zip', expect.any(String), { + onProgress: progressCallback, + signal: aborter.signal, + }) + .thenDo( + (_url, dest, options) => + new Promise((resolve, reject) => { + const listener = () => { + reject(options.signal.reason) + } + options.signal.addEventListener('abort', listener, { once: true }) + aborter.abort('oh no!') + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + ) + return expect( + downloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + progressCallback, + aborter + ) + ).rejects.toThrow() + })) +}) + +describe('getOrDownloadReleaseFiles', () => { + it('should not download release files if they are cached', () => + directoryWithCleanup(directory => + fs + .writeFile(path.join(directory, 'ot3-system.zip'), 'asdjlhasd') + .then(() => + expect( + getOrDownloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + releaseNotes: 'http://opentrons.com/releaseNotes.md', + } as ReleaseSetUrls, + directory, + vi.fn(), + new AbortController() + ) + ) + .resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + .then(() => expect(fetchToFile).not.toHaveBeenCalled()) + ) + )) + it('should download release files if they are not cached', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenDo((_url, dest, _opts) => { + return fs + .writeFile(dest, 'this is the contents of the system.zip') + .then(() => dest) + }) + + return expect( + getOrDownloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + vi.fn(), + new AbortController() + ) + 
) + .resolves.toEqual({ + system: path.join(directory, 'ot3-system.zip'), + releaseNotes: null, + releaseNotesContent: null, + }) + .then(() => + fs + .readFile(path.join(directory, 'ot3-system.zip'), { + encoding: 'utf-8', + }) + .then(contents => + expect(contents).toEqual('this is the contents of the system.zip') + ) + ) + })) + it('should fail if the file is not cached and can not be downloaded', () => + directoryWithCleanup(directory => { + when(fetchToFile) + .calledWith( + 'http://opentrons.com/ot3-system.zip', + expect.any(String), + expect.any(Object) + ) + .thenReject(new Error('oh no')) + + return expect( + getOrDownloadReleaseFiles( + { + system: 'http://opentrons.com/ot3-system.zip', + } as ReleaseSetUrls, + directory, + vi.fn(), + new AbortController() + ) + ).rejects.toThrow() + })) +}) diff --git a/app-shell-odd/src/system-update/from-web/__tests__/release-manifest.test.ts b/app-shell-odd/src/system-update/from-web/__tests__/release-manifest.test.ts new file mode 100644 index 00000000000..8062cd6b28b --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/__tests__/release-manifest.test.ts @@ -0,0 +1,185 @@ +import { describe, it, vi, expect } from 'vitest' +import { when } from 'vitest-when' +import path from 'path' +import { readdir, writeFile, mkdir, readFile } from 'fs/promises' +import { fetchJson as _fetchJson } from '../../../http' +import { ensureCacheDir, getOrDownloadManifest } from '../release-manifest' +import { directoryWithCleanup } from '../../utils' + +vi.mock('../../../http') +// note: this doesn't look like it's needed but it is because http uses log +vi.mock('../../../log') +const fetchJson = vi.mocked(_fetchJson) + +const MOCK_MANIFEST = { + production: { + '1.2.3': { + fullImage: 'https://opentrons.com/no', + system: 'https://opentrons.com/no2', + version: 'https://opentrons.com/no3', + releaseNotes: 'https://opentrons.com/no4', + }, + }, +} + +describe('ensureCacheDirectory', () => { + it('should create the cache directory if it or its parents do not exist', () => + directoryWithCleanup(directory => + ensureCacheDir( + path.join(directory as string, 'somerandomname', 'someotherrandomname') + ) + .then(ensuredDirectory => { + expect(ensuredDirectory).toEqual( + path.join(directory, 'somerandomname', 'someotherrandomname') + ) + return readdir(path.join(directory, 'somerandomname'), { + withFileTypes: true, + }) + }) + .then(contents => { + expect(contents).toHaveLength(1) + expect(contents[0].isDirectory()).toBeTruthy() + expect(contents[0].name).toEqual('someotherrandomname') + return readdir(path.join(contents[0].path, contents[0].name)) + }) + .then(contents => { + expect(contents).toHaveLength(0) + }) + )) + it('should delete and recreate the cache directory if it is a file', () => + directoryWithCleanup(directory => + writeFile(path.join(directory, 'somerandomname'), 'alsdasda') + .then(() => ensureCacheDir(path.join(directory, 'somerandomname'))) + .then(ensuredDirectory => { + expect(ensuredDirectory).toEqual( + path.join(directory, 'somerandomname') + ) + return readdir(directory, { withFileTypes: true }) + }) + .then(contents => { + expect(contents).toHaveLength(1) + expect(contents[0].isDirectory()).toBeTruthy() + expect(contents[0].name).toEqual('somerandomname') + return readdir(path.join(contents[0].path, contents[0].name)) + }) + .then(contents => { + expect(contents).toHaveLength(0) + }) + )) + + it('should remove a non-file with the same name as the manifest file', () => + directoryWithCleanup(directory => + 
mkdir(path.join(directory, 'somerandomname', 'manifest.json'), { + recursive: true, + }) + .then(() => + writeFile( + path.join(directory, 'somerandomname', 'testfile'), + 'testdata' + ) + ) + .then(() => ensureCacheDir(path.join(directory, 'somerandomname'))) + .then(ensuredDirectory => readdir(ensuredDirectory)) + .then(contents => { + expect(contents).not.toContain('manifest.json') + return readFile(path.join(directory, 'somerandomname', 'testfile'), { + encoding: 'utf-8', + }) + }) + .then(contents => expect(contents).toEqual('testdata')) + )) + + it('should preserve extra contents of the directory if the directory exists', () => + directoryWithCleanup(directory => + mkdir(path.join(directory, 'somerandomname'), { recursive: true }) + .then(() => + writeFile( + path.join(directory, 'somerandomname', 'somerandomfile'), + 'somerandomdata' + ) + ) + .then(() => ensureCacheDir(path.join(directory, 'somerandomname'))) + .then(ensuredDirectory => { + expect(ensuredDirectory).toEqual( + path.join(directory, 'somerandomname') + ) + return readFile( + path.join(directory, 'somerandomname', 'somerandomfile'), + { encoding: 'utf-8' } + ) + }) + .then(contents => { + expect(contents).toEqual('somerandomdata') + return readdir(directory) + }) + .then(contents => expect(contents).toEqual(['somerandomname'])) + )) +}) + +describe('getOrDownloadManifest', () => { + const localManifest = { + production: { + '4.5.6': { + fullImage: 'https://opentrons.com/no', + system: 'https://opentrons.com/no2', + version: 'https://opentrons.com/no3', + releaseNotes: 'https://opentrons.com/no4', + }, + }, + } + it('should download a new manifest if possible', () => + directoryWithCleanup(directory => + writeFile( + path.join(directory, 'manifest.json'), + JSON.stringify(localManifest) + ) + .then(() => { + when(fetchJson) + .calledWith( + 'http://opentrons.com/releases.json', + expect.any(Object) + ) + .thenResolve(MOCK_MANIFEST) + return getOrDownloadManifest( + 'http://opentrons.com/releases.json', + directory, + new AbortController() + ) + }) + .then(manifest => expect(manifest).toEqual(MOCK_MANIFEST)) + )) + it('should use a cached manifest if the download fails', () => + directoryWithCleanup(directory => + writeFile( + path.join(directory, 'manifest.json'), + JSON.stringify(localManifest) + ) + .then(() => { + when(fetchJson) + .calledWith( + 'http://opentrons.com/releases.json', + expect.any(Object) + ) + .thenReject(new Error('oh no!')) + return getOrDownloadManifest( + 'http://opentrons.com/releases.json', + directory, + new AbortController() + ) + }) + .then(manifest => expect(manifest).toEqual(localManifest)) + )) + it('should reject if no manifest is available', () => + directoryWithCleanup(directory => { + when(fetchJson) + .calledWith('http://opentrons.com/releases.json', expect.any(Object)) + .thenReject(new Error('oh no!')) + return expect( + getOrDownloadManifest( + 'http://opentrons.com/releases.json', + directory, + new AbortController() + ) + ).rejects.toThrow() + })) +}) diff --git a/app-shell-odd/src/system-update/from-web/index.ts b/app-shell-odd/src/system-update/from-web/index.ts new file mode 100644 index 00000000000..0a9c34e3370 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/index.ts @@ -0,0 +1,2 @@ +export { getProvider } from './provider' +export type { WebUpdateSource } from './provider' diff --git a/app-shell-odd/src/system-update/from-web/latest-update.ts b/app-shell-odd/src/system-update/from-web/latest-update.ts new file mode 100644 index 00000000000..1a270c85ddd --- 
/dev/null +++ b/app-shell-odd/src/system-update/from-web/latest-update.ts @@ -0,0 +1,28 @@ +import semver from 'semver' + +const channelFinder = (version: string, channel: string): boolean => { + // return the latest alpha/beta if a user subscribes to alpha/beta updates + if (['alpha', 'beta'].includes(channel)) { + return version.includes(channel) + } else { + // otherwise get the latest stable version + return !version.includes('alpha') && !version.includes('beta') + } +} + +export const latestVersionForChannel = ( + availableVersions: string[], + channel: string +): string | null => + availableVersions + .filter(version => channelFinder(version, channel)) + .sort((a, b) => (semver.gt(a, b) ? 1 : -1)) + .pop() ?? null + +export const shouldUpdate = ( + currentVersion: string, + availableVersion: string | null +): string | null => + availableVersion != null && currentVersion !== availableVersion + ? availableVersion + : null diff --git a/app-shell-odd/src/system-update/from-web/provider.ts b/app-shell-odd/src/system-update/from-web/provider.ts new file mode 100644 index 00000000000..ca5c8da9fc9 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/provider.ts @@ -0,0 +1,209 @@ +import path from 'path' +import { rm } from 'fs/promises' + +import { createLogger } from '../../log' +import { LocalAbortError } from '../../http' + +import type { + UpdateProvider, + ResolvedUpdate, + UnresolvedUpdate, + ProgressCallback, + NoUpdate, +} from '../types' + +import { getOrDownloadManifest, getReleaseSet } from './release-manifest' +import { cleanUpAndGetOrDownloadReleaseFiles } from './release-files' +import { latestVersionForChannel, shouldUpdate } from './latest-update' + +import type { DownloadProgress } from '../../http' + +const log = createLogger('systemUpdate/from-web/provider') + +export interface WebUpdateSource { + manifestUrl: string + channel: string + updateCacheDirectory: string + currentVersion: string +} + +export function getProvider( + from: WebUpdateSource +): UpdateProvider { + let locked = false + let canceller = new AbortController() + const lockCache = (): void => { + locked = true + canceller.abort('cache locked') + canceller = new AbortController() + } + const versionCacheDir = path.join(from.updateCacheDirectory, 'versions') + const noUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } as const + let currentUpdate: UnresolvedUpdate = noUpdate + let currentCheck: Promise | null = null + const updater = async ( + progress: ProgressCallback + ): Promise => { + const myCanceller = canceller + // this needs to be an `as`-assertion on the value because we can only guarantee that + // currentUpdate is resolved by the function of the program: we know that this function, + // which is the only thing that can alter currentUpdate, will always end with a resolved update, + // and we know that this function will not be running twice at the same time. + // eslint-disable-next-line @typescript-eslint/consistent-type-assertions + const previousUpdate = { + version: currentUpdate.version, + files: currentUpdate.files == null ? 
null : { ...currentUpdate.files }, + releaseNotes: currentUpdate.releaseNotes, + downloadProgress: currentUpdate.downloadProgress, + } as ResolvedUpdate + if (locked) { + throw new Error('cache locked') + } + const returnNoUpdate = (): NoUpdate => { + currentUpdate = noUpdate + progress(noUpdate) + return noUpdate + } + const manifest = await getOrDownloadManifest( + from.manifestUrl, + from.updateCacheDirectory, + myCanceller + ).catch((error: Error) => { + if (myCanceller.signal.aborted) { + log.info('aborted cache update because cache was locked') + currentUpdate = previousUpdate + progress(previousUpdate) + throw error + } + log.info( + `Failed to get or download update manifest: ${error.name}: ${error.message}` + ) + return null + }) + if (manifest == null) { + log.info(`no manifest found, returning`) + return returnNoUpdate() + } + const latestVersion = latestVersionForChannel( + Object.keys(manifest.production), + from.channel + ) + + const versionToUpdate = shouldUpdate(from.currentVersion, latestVersion) + if (versionToUpdate == null) { + log.debug(`no update found, returning`) + return returnNoUpdate() + } + const releaseUrls = getReleaseSet(manifest, versionToUpdate) + if (releaseUrls == null) { + log.debug(`no release urls found, returning`) + return returnNoUpdate() + } + log.info(`Finding version ${latestVersion}`) + const downloadingUpdate = { + version: latestVersion, + files: null, + releaseNotes: null, + downloadProgress: 0, + } as const + progress(downloadingUpdate) + currentUpdate = downloadingUpdate + + if (myCanceller.signal.aborted) { + log.info('aborted cache update because cache was locked') + currentUpdate = previousUpdate + progress(previousUpdate) + throw new LocalAbortError('cache locked') + } + const localFiles = await cleanUpAndGetOrDownloadReleaseFiles( + releaseUrls, + versionCacheDir, + versionToUpdate, + (downloadProgress: DownloadProgress): void => { + const downloadProgressPercent = + downloadProgress.size == null || downloadProgress.size === 0.0 + ? 
0 + : (downloadProgress.downloaded / downloadProgress.size) * 100 + log.debug( + `Downloading update ${versionToUpdate}: ${downloadProgress.downloaded}/${downloadProgress.size}B (${downloadProgressPercent}%)` + ) + const update = { + version: versionToUpdate, + files: null, + releaseNotes: null, + downloadProgress: downloadProgressPercent, + } + currentUpdate = update + progress(update) + }, + myCanceller + ).catch((err: Error) => { + if (myCanceller.signal.aborted) { + currentUpdate = previousUpdate + progress(previousUpdate) + throw err + } else { + log.warn(`Failed to fetch update data: ${err.name}: ${err.message}`) + } + return null + }) + + if (localFiles == null) { + log.info( + `Download of ${versionToUpdate} failed, no release data is available` + ) + return returnNoUpdate() + } + if (myCanceller.signal.aborted) { + currentUpdate = previousUpdate + progress(previousUpdate) + throw new LocalAbortError('cache locked') + } + + const updateDetails = { + version: versionToUpdate, + files: { + system: localFiles.system, + releaseNotes: localFiles.releaseNotes, + }, + releaseNotes: localFiles.releaseNotesContent, + downloadProgress: 100, + } as const + currentUpdate = updateDetails + progress(updateDetails) + return updateDetails + } + return { + getUpdateDetails: () => currentUpdate, + refreshUpdateCache: (progress: ProgressCallback) => { + if (currentCheck != null) { + return new Promise((resolve, reject) => { + reject(new Error('Check already ongoing')) + }) + } else { + const updaterPromise = updater(progress) + currentCheck = updaterPromise + return updaterPromise.finally(() => { + currentCheck = null + }) + } + }, + + teardown: () => { + lockCache() + return rm(from.updateCacheDirectory, { recursive: true, force: true }) + }, + lockUpdateCache: lockCache, + unlockUpdateCache: () => { + locked = false + }, + name: () => + `WebUpdateProvider from ${from.manifestUrl} channel ${from.channel}`, + source: () => from, + } +} diff --git a/app-shell-odd/src/system-update/from-web/release-files.ts b/app-shell-odd/src/system-update/from-web/release-files.ts new file mode 100644 index 00000000000..a3c45cf5d42 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/release-files.ts @@ -0,0 +1,243 @@ +// functions for downloading and storing release files + +import path from 'path' +import tempy from 'tempy' +import { move, readdir, rm, mkdirp, readFile } from 'fs-extra' +import { fetchToFile } from '../../http' +import { createLogger } from '../../log' + +import type { DownloadProgress } from '../../http' +import type { ReleaseSetUrls, ReleaseSetFilepaths } from '../types' +import type { Dirent } from 'fs' + +const log = createLogger('systemUpdate/from-web/release-files') +const outPath = (dir: string, url: string): string => { + return path.join(dir, path.basename(url)) +} + +const RELEASE_DIRECTORY_PREFIX = 'cached-release-' + +export const directoryNameForRelease = (version: string): string => + `${RELEASE_DIRECTORY_PREFIX}${version}` + +export const directoryForRelease = ( + baseDirectory: string, + version: string +): string => path.join(baseDirectory, directoryNameForRelease(version)) + +async function ensureReleaseCache(baseDirectory: string): Promise { + try { + return await readdir(baseDirectory, { withFileTypes: true }) + } catch (error: any) { + console.log( + `Could not read download cache base directory: ${error.name}: ${error.message}: remaking` + ) + await rm(baseDirectory, { force: true, recursive: true }) + await mkdirp(baseDirectory) + return [] + } +} + +export const 
ensureCleanReleaseCacheForVersion = ( + baseDirectory: string, + version: string +): Promise => + ensureReleaseCache(baseDirectory) + .then(contents => + Promise.all( + contents.map(contained => + !contained.isDirectory() || + contained.name !== directoryNameForRelease(version) + ? rm(path.join(baseDirectory, contained.name), { + force: true, + recursive: true, + }) + : new Promise(resolve => { + resolve() + }) + ) + ) + ) + .then(() => mkdirp(directoryForRelease(baseDirectory, version))) + .then(() => directoryForRelease(baseDirectory, version)) + +export interface ReleaseSetData extends ReleaseSetFilepaths { + releaseNotesContent: string | null +} + +export const augmentWithReleaseNotesContent = ( + releaseFiles: ReleaseSetFilepaths +): Promise => + releaseFiles.releaseNotes == null + ? new Promise(resolve => { + resolve({ ...releaseFiles, releaseNotesContent: null }) + }) + : readReleaseNotes(releaseFiles.releaseNotes) + .then(releaseNotesContent => ({ ...releaseFiles, releaseNotesContent })) + .catch(err => { + log.error( + `Release notes should be present but cannot be read: ${err.name}: ${err.message}` + ) + return { ...releaseFiles, releaseNotesContent: null } + }) + +// checks `directory` for system update files matching the given `urls`, and +// downloads them if they can't be found +export function getReleaseFiles( + urls: ReleaseSetUrls, + directory: string +): Promise { + return readdir(directory).then((files: string[]) => { + log.info(`Files in system update download directory ${directory}: ${files}`) + const expected = { + system: path.basename(urls.system), + releaseNotes: + urls?.releaseNotes == null ? null : path.basename(urls.releaseNotes), + } + const foundFiles = files.reduce>( + ( + releaseSetFilePaths: Partial, + thisFile: string + ): Partial => { + if (thisFile === expected.system) { + return { ...releaseSetFilePaths, system: thisFile } + } + if ( + expected.releaseNotes != null && + thisFile === expected.releaseNotes + ) { + return { ...releaseSetFilePaths, releaseNotes: thisFile } + } + return releaseSetFilePaths + }, + {} + ) + if (foundFiles?.system != null) { + const files = { + system: outPath(directory, foundFiles.system), + releaseNotes: + foundFiles?.releaseNotes != null + ? outPath(directory, foundFiles.releaseNotes) + : null, + } + log.info( + `Found system file ${foundFiles.system} in cache directory ${directory}` + ) + return augmentWithReleaseNotesContent(files) + } + + throw new Error( + `no release files cached: could not find system file ${outPath( + directory, + urls.system + )} in ${files}` + ) + }) +} + +// downloads the entire release set to a temporary directory, and once they're +// all successfully downloaded, renames the directory to `directory` +export function downloadReleaseFiles( + urls: ReleaseSetUrls, + directory: string, + // `onProgress` will be called with download progress as the files are read + onProgress: (progress: DownloadProgress) => void, + canceller: AbortController +): Promise { + const tempDir: string = tempy.directory() + const tempSystemPath = outPath(tempDir, urls.system) + const tempNotesPath = outPath(tempDir, urls.releaseNotes ?? '') + // downloads are streamed directly to the filesystem to avoid loading them + // all into memory simultaneously + const notesReq = + urls.releaseNotes != null + ? 
fetchToFile(urls.releaseNotes, tempNotesPath, { + signal: canceller.signal, + }).catch(err => { + log.warn( + `release notes not available from ${urls.releaseNotes}: ${err.name}: ${err.message}` + ) + return null + }) + : Promise.resolve(null) + if (urls.releaseNotes != null) { + log.info(`Downloading ${urls.releaseNotes} to ${tempNotesPath}`) + } else { + log.info('No release notes available, not downloading') + } + log.info(`Downloading ${urls.system} to ${tempSystemPath}`) + const systemReq = fetchToFile(urls.system, tempSystemPath, { + onProgress, + signal: canceller.signal, + }) + return Promise.all([systemReq, notesReq]) + .then(results => { + const [systemTemp, releaseNotesTemp] = results + const systemPath = outPath(directory, systemTemp) + const notesPath = releaseNotesTemp + ? outPath(directory, releaseNotesTemp) + : null + + log.info(`Download complete, ${tempDir}=>${directory}`) + + return move(tempDir, directory, { overwrite: true }).then(() => { + log.info(`Move complete`) + return augmentWithReleaseNotesContent({ + system: systemPath, + releaseNotes: notesPath, + }) + }) + }) + .catch(error => { + log.error( + `Failed to download release files: ${error.name}: ${error.message}` + ) + return rm(tempDir, { force: true, recursive: true }).then(() => { + throw error + }) + }) +} + +export async function getOrDownloadReleaseFiles( + urls: ReleaseSetUrls, + releaseCacheDirectory: string, + onProgress: (progress: DownloadProgress) => void, + canceller: AbortController +): Promise { + try { + return await getReleaseFiles(urls, releaseCacheDirectory) + } catch (error: any) { + log.info( + `Could not find cached release files for ${releaseCacheDirectory}: ${error.name}: ${error.message}, attempting to download` + ) + return await downloadReleaseFiles( + urls, + releaseCacheDirectory, + onProgress, + canceller + ) + } +} + +export const cleanUpAndGetOrDownloadReleaseFiles = ( + urls: ReleaseSetUrls, + baseDirectory: string, + version: string, + onProgress: (progress: DownloadProgress) => void, + canceller: AbortController +): Promise => + ensureCleanReleaseCacheForVersion(baseDirectory, version).then(versionCache => + getOrDownloadReleaseFiles(urls, versionCache, onProgress, canceller) + ) + +const readReleaseNotes = (path: string | null): Promise => + path == null + ? new Promise(resolve => { + resolve(null) + }) + : readFile(path, { encoding: 'utf-8' }).catch(err => { + log.warn( + `Could not read release notes from ${path}: ${err.name}: ${err.message}` + ) + return null + }) diff --git a/app-shell-odd/src/system-update/from-web/release-manifest.ts b/app-shell-odd/src/system-update/from-web/release-manifest.ts new file mode 100644 index 00000000000..9433067cb17 --- /dev/null +++ b/app-shell-odd/src/system-update/from-web/release-manifest.ts @@ -0,0 +1,101 @@ +import * as FS from 'fs/promises' +import path from 'path' +import { readJson, outputJson } from 'fs-extra' + +import type { Stats } from 'fs' +import { fetchJson, LocalAbortError } from '../../http' +import type { ReleaseManifest, ReleaseSetUrls } from '../types' +import { createLogger } from '../../log' + +const log = createLogger('systemUpdate/from-web/provider') + +export function getReleaseSet( + manifest: ReleaseManifest, + version: string +): ReleaseSetUrls | null { + return manifest.production[version] ?? 
null +} + +export const getCachedReleaseManifest = ( + cacheDir: string +): Promise => readJson(`${cacheDir}/manifest.json`) + +const removeAndRemake = (directory: string): Promise => + FS.rm(directory, { recursive: true, force: true }) + .then(() => FS.mkdir(directory, { recursive: true })) + .then(() => FS.stat(directory)) + +export const ensureCacheDir = (directory: string): Promise => + FS.stat(directory) + .catch(() => removeAndRemake(directory)) + .then(stats => + stats.isDirectory() + ? new Promise(resolve => { + resolve(stats) + }) + : removeAndRemake(directory) + ) + .then(() => FS.readdir(directory, { withFileTypes: true })) + .then(contents => { + const manifestCandidate = contents.find( + entry => entry.name === 'manifest.json' + ) + if (manifestCandidate == null || manifestCandidate.isFile()) { + return new Promise(resolve => { + resolve(directory) + }) + } + return FS.rm(path.join(directory, 'manifest.json'), { + force: true, + recursive: true, + }).then(() => directory) + }) + +export const downloadManifest = ( + manifestUrl: string, + cacheDir: string, + cancel: AbortController +): Promise => { + log.info(`Attempting to fetch release manifest from ${manifestUrl}`) + return fetchJson(manifestUrl, { + signal: cancel.signal, + }).then(manifest => { + log.info('Fetched release manifest OK') + return outputJson(path.join(cacheDir, 'manifest.json'), manifest).then( + () => manifest + ) + }) +} + +export const ensureCacheDirAndDownloadManifest = ( + manifestUrl: string, + cacheDir: string, + cancel: AbortController +): Promise => + ensureCacheDir(cacheDir).then(ensuredCacheDir => + downloadManifest(manifestUrl, ensuredCacheDir, cancel) + ) + +export async function getOrDownloadManifest( + manifestUrl: string, + cacheDir: string, + cancel: AbortController +): Promise { + try { + return await ensureCacheDirAndDownloadManifest( + manifestUrl, + cacheDir, + cancel + ) + } catch (error: any) { + if (error instanceof LocalAbortError) { + log.info('Aborted during manifest fetch') + throw error + } else { + log.info( + `Could not fetch manifest: ${error.name}: ${error.message}, falling back to cached` + ) + return await getCachedReleaseManifest(cacheDir) + } + } +} diff --git a/app-shell-odd/src/system-update/handler.ts b/app-shell-odd/src/system-update/handler.ts new file mode 100644 index 00000000000..8344578e9fa --- /dev/null +++ b/app-shell-odd/src/system-update/handler.ts @@ -0,0 +1,380 @@ +// system update handler + +import Semver from 'semver' + +import { CONFIG_INITIALIZED, VALUE_UPDATED } from '../constants' +import { createLogger } from '../log' +import { postFile } from '../http' +import { getConfig } from '../config' +import { getSystemUpdateDir } from './directories' +import { SYSTEM_FILENAME, FLEX_MANIFEST_URL } from './constants' +import { getProvider as getWebUpdateProvider } from './from-web' +import { getProvider as getUsbUpdateProvider } from './from-usb' + +import type { Action, Dispatch } from '../types' +import type { UpdateProvider, UnresolvedUpdate, ReadyUpdate } from './types' +import type { USBUpdateSource } from './from-usb' + +export const CURRENT_SYSTEM_VERSION = _PKG_VERSION_ + +const log = createLogger('system-update/handler') + +export interface UpdateDriver { + handleAction: (action: Action) => Promise + reload: () => Promise + shouldReload: () => boolean + teardown: () => Promise +} + +export function createUpdateDriver(dispatch: Dispatch): UpdateDriver { + log.info(`Running robot system updates storing to ${getSystemUpdateDir()}`) + + let webUpdate: 
UnresolvedUpdate = { + version: null, + files: null, + releaseNotes: null, + downloadProgress: 0, + } + let webProvider = getWebUpdateProvider({ + manifestUrl: FLEX_MANIFEST_URL, + channel: getConfig('update').channel, + updateCacheDirectory: getSystemUpdateDir(), + currentVersion: CURRENT_SYSTEM_VERSION, + }) + const usbProviders: Record> = {} + let currentBestUsbUpdate: + | (ReadyUpdate & { providerName: string }) + | null = null + + const updateBestUsbUpdate = (): void => { + currentBestUsbUpdate = null + Object.values(usbProviders).forEach(provider => { + const providerUpdate = provider.getUpdateDetails() + if (providerUpdate.files == null) { + // nothing to do, keep null + } else if (currentBestUsbUpdate == null) { + currentBestUsbUpdate = { + ...(providerUpdate as ReadyUpdate), + providerName: provider.name(), + } + } else if ( + Semver.gt(providerUpdate.version, currentBestUsbUpdate.version) + ) { + currentBestUsbUpdate = { + ...(providerUpdate as ReadyUpdate), + providerName: provider.name(), + } + } + }) + } + + const dispatchStaticUpdateData = (): void => { + if (currentBestUsbUpdate != null) { + dispatchUpdateInfo( + { + version: currentBestUsbUpdate.version, + releaseNotes: currentBestUsbUpdate.releaseNotes, + force: true, + }, + dispatch + ) + } else { + dispatchUpdateInfo( + { + version: webUpdate.version, + releaseNotes: webUpdate.releaseNotes, + force: false, + }, + dispatch + ) + } + } + + return { + handleAction: (action: Action): Promise => { + switch (action.type) { + case 'shell:CHECK_UPDATE': + return webProvider + .refreshUpdateCache(updateStatus => { + webUpdate = updateStatus + if (currentBestUsbUpdate == null) { + if ( + updateStatus.version != null && + updateStatus.files == null && + updateStatus.downloadProgress === 0 + ) { + dispatch({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { + version: updateStatus.version, + force: false, + target: 'flex', + }, + }) + } else if ( + updateStatus.version != null && + updateStatus.files == null && + updateStatus.downloadProgress !== 0 + ) { + dispatch({ + // TODO: change this action type to 'systemUpdate:DOWNLOAD_PROGRESS' + type: 'robotUpdate:DOWNLOAD_PROGRESS', + payload: { + progress: updateStatus.downloadProgress, + target: 'flex', + }, + }) + } else if (updateStatus.files != null) { + dispatchStaticUpdateData() + } + } + }) + .catch(err => { + log.warn( + `Error finding updates with ${webProvider.name()}: ${ + err.name + }: ${err.message}` + ) + return { + version: null, + files: null, + downloadProgress: 0, + releaseNotes: null, + } as const + }) + .then(result => { + webUpdate = result + dispatchStaticUpdateData() + }) + case 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED': + log.info( + `mass storage device enumerated at ${action.payload.rootPath}` + ) + if (usbProviders[action.payload.rootPath] != null) { + return new Promise(resolve => { + resolve() + }) + } + usbProviders[action.payload.rootPath] = getUsbUpdateProvider({ + currentVersion: CURRENT_SYSTEM_VERSION, + massStorageDeviceRoot: action.payload.rootPath, + massStorageDeviceFiles: action.payload.filePaths, + }) + return usbProviders[action.payload.rootPath] + .refreshUpdateCache(() => {}) + .then(() => { + updateBestUsbUpdate() + dispatchStaticUpdateData() + }) + .catch(err => { + log.error( + `Failed to get updates from ${action.payload.rootPath}: ${err.name}: ${err.message}` + ) + }) + + case 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED': + log.info(`mass storage removed at ${action.payload.rootPath}`) + const provider = 
usbProviders[action.payload.rootPath] + if (provider != null) { + return provider + .teardown() + .then(() => { + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete usbProviders[action.payload.rootPath] + updateBestUsbUpdate() + }) + .catch(err => { + log.error( + `Failed to tear down provider ${provider.name()}: ${ + err.name + }: ${err.message}` + ) + }) + .then(() => { + dispatchStaticUpdateData() + }) + } + return new Promise(resolve => { + resolve() + }) + case 'robotUpdate:UPLOAD_FILE': { + const { host, path, systemFile } = action.payload + // eslint-disable-next-line @typescript-eslint/no-floating-promises + return postFile( + `http://${host.ip}:${host.port}${path}`, + SYSTEM_FILENAME, + systemFile + ) + .then(() => ({ + type: 'robotUpdate:FILE_UPLOAD_DONE' as const, + payload: host.name, + })) + .catch((error: Error) => { + log.warn('Error uploading update to robot', { + path, + systemFile, + error, + }) + + return { + type: 'robotUpdate:UNEXPECTED_ERROR' as const, + payload: { + message: `Error uploading update to robot: ${error.message}`, + }, + } + }) + .then(dispatch) + } + case 'robotUpdate:READ_SYSTEM_FILE': { + const getDetails = (): { + systemFile: string + version: string + isManualFile: false + } | null => { + if (currentBestUsbUpdate) { + return { + systemFile: currentBestUsbUpdate.files.system, + version: currentBestUsbUpdate.version, + isManualFile: false, + } + } else if (webUpdate.files?.system != null) { + return { + systemFile: webUpdate.files.system, + version: webUpdate.version as string, // version is string if files is not null + isManualFile: false, + } + } else { + return null + } + } + return new Promise(resolve => { + const details = getDetails() + if (details == null) { + dispatch({ + type: 'robotUpdate:UNEXPECTED_ERROR', + payload: { message: 'System update file not downloaded' }, + }) + resolve() + return + } + + dispatch({ + type: 'robotUpdate:FILE_INFO' as const, + payload: details, + }) + resolve() + }) + } + case 'robotUpdate:READ_USER_FILE': { + return new Promise(resolve => { + dispatch({ + type: 'robotUpdate:UNEXPECTED_ERROR', + payload: { + message: 'Updates of this kind are not implemented for ODD', + }, + }) + resolve() + }) + } + } + return new Promise(resolve => { + resolve() + }) + }, + reload: () => { + webProvider.lockUpdateCache() + return webProvider + .teardown() + .catch(err => { + log.error( + `Failed to tear down web provider ${webProvider.name()}: ${ + err.name + }: ${err.message}` + ) + }) + .then(() => { + webProvider = getWebUpdateProvider({ + manifestUrl: FLEX_MANIFEST_URL, + channel: getConfig('update').channel, + updateCacheDirectory: getSystemUpdateDir(), + currentVersion: CURRENT_SYSTEM_VERSION, + }) + }) + .catch(err => { + const message = `System updates failed to handle config change: ${err.name}: ${err.message}` + log.error(message) + dispatch({ + type: 'robotUpdate:UNEXPECTED_ERROR', + payload: { message: message }, + }) + }) + }, + shouldReload: () => + getConfig('update').channel !== webProvider.source().channel, + teardown: () => { + return Promise.allSettled([ + webProvider.teardown(), + ...Object.values(usbProviders).map(provider => provider.teardown()), + ]) + .catch(errs => { + log.error(`Failed to tear down some providers: ${errs}`) + }) + .then(results => { + log.info('all providers torn down') + }) + }, + } +} + +export interface UpdatableDriver { + getUpdateDriver: () => UpdateDriver | null + handleAction: (action: Action) => Promise +} + +export function manageDriver(dispatch: 
Dispatch): UpdatableDriver { + let updateDriver: UpdateDriver | null = null + return { + handleAction: action => { + if (action.type === CONFIG_INITIALIZED) { + log.info('Initializing update driver') + return new Promise(resolve => { + updateDriver = createUpdateDriver(dispatch) + resolve() + }) + } else if (updateDriver != null) { + if (action.type === VALUE_UPDATED && updateDriver.shouldReload()) { + return updateDriver.reload() + } else { + return updateDriver.handleAction(action) + } + } else { + return new Promise(resolve => { + log.warn( + `update driver manager received action ${action.type} before initialization` + ) + resolve() + }) + } + }, + getUpdateDriver: () => updateDriver, + } +} + +export function registerRobotSystemUpdate(dispatch: Dispatch): Dispatch { + return manageDriver(dispatch).handleAction +} + +const dispatchUpdateInfo = ( + info: { version: string | null; releaseNotes: string | null; force: boolean }, + dispatch: Dispatch +): void => { + const { version, releaseNotes, force } = info + dispatch({ + type: 'robotUpdate:UPDATE_INFO', + payload: { releaseNotes, version, force, target: 'flex' }, + }) + dispatch({ + type: 'robotUpdate:UPDATE_VERSION', + payload: { version, force, target: 'flex' }, + }) +} diff --git a/app-shell-odd/src/system-update/index.ts b/app-shell-odd/src/system-update/index.ts index 7d8e62fb8ac..4ec36b05a57 100644 --- a/app-shell-odd/src/system-update/index.ts +++ b/app-shell-odd/src/system-update/index.ts @@ -1,394 +1,2 @@ // system update files -import path from 'path' -import { ensureDir } from 'fs-extra' -import { readFile } from 'fs/promises' -import StreamZip from 'node-stream-zip' -import Semver from 'semver' -import { UI_INITIALIZED } from '../constants' -import { createLogger } from '../log' -import { - getLatestSystemUpdateUrls, - getLatestVersion, - isUpdateAvailable, - updateLatestVersion, -} from '../update' -import { - getReleaseFiles, - readUserFileInfo, - cleanupReleaseFiles, -} from './release-files' -import { uploadSystemFile } from './update' -import { getSystemUpdateDir } from './directories' - -import type { DownloadProgress } from '../http' -import type { Action, Dispatch } from '../types' -import type { ReleaseSetFilepaths } from './types' - -const log = createLogger('systemUpdate/index') -const REASONABLE_VERSION_FILE_SIZE_B = 4096 - -let isGettingLatestSystemFiles = false -const isGettingMassStorageUpdatesFrom: Set = new Set() -let massStorageUpdateSet: ReleaseSetFilepaths | null = null -let systemUpdateSet: ReleaseSetFilepaths | null = null - -const readFileInfoAndDispatch = ( - dispatch: Dispatch, - fileName: string, - isManualFile: boolean = false -): Promise => - readUserFileInfo(fileName) - .then(fileInfo => ({ - type: 'robotUpdate:FILE_INFO' as const, - payload: { - systemFile: fileInfo.systemFile, - version: fileInfo.versionInfo.opentrons_api_version, - isManualFile, - }, - })) - .catch((error: Error) => ({ - type: 'robotUpdate:UNEXPECTED_ERROR' as const, - payload: { message: error.message }, - })) - .then(dispatch) - -export function registerRobotSystemUpdate(dispatch: Dispatch): Dispatch { - log.info(`Running robot system updates storing to ${getSystemUpdateDir()}`) - return function handleAction(action: Action) { - switch (action.type) { - case UI_INITIALIZED: - case 'shell:CHECK_UPDATE': - // short circuit early if we're already downloading the latest system files - if (isGettingLatestSystemFiles) { - log.info(`system update download already in progress`) - return - } - updateLatestVersion() - .then(() => { - 
if (isUpdateAvailable() && !isGettingLatestSystemFiles) { - isGettingLatestSystemFiles = true - return getLatestSystemUpdateFiles(dispatch) - } - }) - .then(() => { - isGettingLatestSystemFiles = false - }) - .catch((error: Error) => { - log.warn('Error checking for update', { - error, - }) - isGettingLatestSystemFiles = false - }) - - break - - case 'robotUpdate:UPLOAD_FILE': { - const { host, path, systemFile } = action.payload - // eslint-disable-next-line @typescript-eslint/no-floating-promises - uploadSystemFile(host, path, systemFile) - .then(() => ({ - type: 'robotUpdate:FILE_UPLOAD_DONE' as const, - payload: host.name, - })) - .catch((error: Error) => { - log.warn('Error uploading update to robot', { - path, - systemFile, - error, - }) - - return { - type: 'robotUpdate:UNEXPECTED_ERROR' as const, - payload: { - message: `Error uploading update to robot: ${error.message}`, - }, - } - }) - .then(dispatch) - - break - } - - case 'robotUpdate:READ_USER_FILE': { - const { systemFile } = action.payload as { systemFile: string } - // eslint-disable-next-line @typescript-eslint/no-floating-promises - readFileInfoAndDispatch(dispatch, systemFile, true) - break - } - case 'robotUpdate:READ_SYSTEM_FILE': { - const systemFile = - massStorageUpdateSet?.system ?? systemUpdateSet?.system - if (systemFile == null) { - dispatch({ - type: 'robotUpdate:UNEXPECTED_ERROR', - payload: { message: 'System update file not downloaded' }, - }) - return - } - // eslint-disable-next-line @typescript-eslint/no-floating-promises - readFileInfoAndDispatch(dispatch, systemFile) - break - } - case 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED': - if (isGettingMassStorageUpdatesFrom.has(action.payload.rootPath)) { - return - } - isGettingMassStorageUpdatesFrom.add(action.payload.rootPath) - getLatestMassStorageUpdateFiles(action.payload.filePaths, dispatch) - .then(() => { - isGettingMassStorageUpdatesFrom.delete(action.payload.rootPath) - }) - .catch(() => { - isGettingMassStorageUpdatesFrom.delete(action.payload.rootPath) - }) - break - case 'shell:ROBOT_MASS_STORAGE_DEVICE_REMOVED': - if ( - massStorageUpdateSet !== null && - massStorageUpdateSet.system.startsWith(action.payload.rootPath) - ) { - console.log( - `Mass storage device ${action.payload.rootPath} removed, reverting to non-usb updates` - ) - massStorageUpdateSet = null - getCachedSystemUpdateFiles(dispatch) - } else { - console.log( - `Mass storage device ${action.payload.rootPath} removed but this was not an update source` - ) - } - break - } - } -} - -const getVersionFromOpenedZipIfValid = (zip: StreamZip): Promise => - new Promise((resolve, reject) => { - Object.values(zip.entries()).forEach(entry => { - if ( - entry.isFile && - entry.name === 'VERSION.json' && - entry.size < REASONABLE_VERSION_FILE_SIZE_B - ) { - const contents = zip.entryDataSync(entry.name).toString('ascii') - try { - const parsedContents = JSON.parse(contents) - if (parsedContents?.robot_type !== 'OT-3 Standard') { - reject(new Error('not a Flex release file')) - } - const fileVersion = parsedContents?.opentrons_api_version - const version = Semver.valid(fileVersion as string) - if (version === null) { - reject(new Error(`${fileVersion} is not a valid version`)) - } else { - resolve(version) - } - } catch (error) { - reject(error) - } - } - }) - }) - -interface FileDetails { - path: string - version: string -} - -const getVersionFromZipIfValid = (path: string): Promise => - new Promise((resolve, reject) => { - const zip = new StreamZip({ file: path, storeEntries: true }) - 
zip.on('ready', () => { - getVersionFromOpenedZipIfValid(zip) - .then(version => { - zip.close() - resolve({ version, path }) - }) - .catch(err => { - zip.close() - reject(err) - }) - }) - zip.on('error', err => { - zip.close() - reject(err) - }) - }) - -const fakeReleaseNotesForMassStorage = (version: string): string => ` -# Opentrons Robot Software Version ${version} - -This update is from a USB mass storage device connected to your Flex, and release notes cannot be shown. - -Don't remove the USB mass storage device while the update is in progress. -` - -export const getLatestMassStorageUpdateFiles = ( - filePaths: string[], - dispatch: Dispatch -): Promise => - Promise.all( - filePaths.map(path => - path.endsWith('.zip') - ? getVersionFromZipIfValid(path).catch(() => null) - : new Promise(resolve => { - resolve(null) - }) - ) - ).then(values => { - const update = values.reduce( - (prev, current) => - prev === null - ? current === null - ? prev - : current - : current === null - ? prev - : Semver.gt(current.version, prev.version) - ? current - : prev, - null - ) - if (update === null) { - console.log('no updates found in mass storage device') - } else { - console.log(`found update to version ${update.version} on mass storage`) - const releaseNotes = fakeReleaseNotesForMassStorage(update.version) - massStorageUpdateSet = { system: update.path, releaseNotes } - dispatchUpdateInfo( - { version: update.version, releaseNotes, force: true }, - dispatch - ) - } - }) - -const dispatchUpdateInfo = ( - info: { version: string | null; releaseNotes: string | null; force: boolean }, - dispatch: Dispatch -): void => { - const { version, releaseNotes, force } = info - dispatch({ - type: 'robotUpdate:UPDATE_INFO', - payload: { releaseNotes, version, force, target: 'flex' }, - }) - dispatch({ - type: 'robotUpdate:UPDATE_VERSION', - payload: { version, force, target: 'flex' }, - }) -} - -// Get latest system update version -// 1. Ensure the system update directory exists -// 2. Get the manifest file from the local cache -// 3. Get the release files according to the manifest -// a. If the files need downloading, dispatch progress updates to UI -// 4. Cache the filepaths of the update files in memory -// 5. 
Dispatch info or error to UI -export function getLatestSystemUpdateFiles( - dispatch: Dispatch -): Promise { - const fileDownloadDir = path.join( - getSystemUpdateDir(), - 'robot-system-updates' - ) - - return ensureDir(getSystemUpdateDir()) - .then(() => getLatestSystemUpdateUrls()) - .then(urls => { - if (urls === null) { - const latestVersion = getLatestVersion() - log.warn('No release files in manifest', { - version: latestVersion, - }) - return Promise.reject( - new Error(`No release files in manifest for version ${latestVersion}`) - ) - } - - let prevPercentDone = 0 - - const handleProgress = (progress: DownloadProgress): void => { - const { downloaded, size } = progress - if (size !== null) { - const percentDone = Math.round((downloaded / size) * 100) - if (Math.abs(percentDone - prevPercentDone) > 0) { - if (massStorageUpdateSet === null) { - dispatch({ - // TODO: change this action type to 'systemUpdate:DOWNLOAD_PROGRESS' - type: 'robotUpdate:DOWNLOAD_PROGRESS', - payload: { progress: percentDone, target: 'flex' }, - }) - } - prevPercentDone = percentDone - } - } - } - - return getReleaseFiles(urls, fileDownloadDir, handleProgress) - .then(filepaths => { - return cacheUpdateSet(filepaths) - }) - .then(updateInfo => { - massStorageUpdateSet === null && - dispatchUpdateInfo({ force: false, ...updateInfo }, dispatch) - }) - .catch((error: Error) => { - dispatch({ - type: 'robotUpdate:DOWNLOAD_ERROR', - payload: { error: error.message, target: 'flex' }, - }) - }) - .then(() => - cleanupReleaseFiles(getSystemUpdateDir(), 'robot-system-updates') - ) - .catch((error: Error) => { - log.warn('Unable to cleanup old release files', { error }) - }) - }) -} - -export function getCachedSystemUpdateFiles( - dispatch: Dispatch -): Promise { - if (systemUpdateSet) { - return getInfoFromUpdateSet(systemUpdateSet) - .then(updateInfo => { - dispatchUpdateInfo({ force: false, ...updateInfo }, dispatch) - }) - .catch(err => { - console.log(`Could not get info from update set: ${err}`) - }) - } else { - dispatchUpdateInfo( - { version: null, releaseNotes: null, force: false }, - dispatch - ) - return new Promise(resolve => { - resolve('no files') - }) - } -} - -function getInfoFromUpdateSet( - filepaths: ReleaseSetFilepaths -): Promise<{ version: string; releaseNotes: string | null }> { - const version = getLatestVersion() - const releaseNotesContentPromise = filepaths.releaseNotes - ? 
readFile(filepaths.releaseNotes, 'utf8') - : new Promise(resolve => { - resolve(null) - }) - return releaseNotesContentPromise - .then(releaseNotes => ({ - version: version, - releaseNotes, - })) - .catch(() => ({ version: version, releaseNotes: '' })) -} - -function cacheUpdateSet( - filepaths: ReleaseSetFilepaths -): Promise<{ version: string; releaseNotes: string | null }> { - systemUpdateSet = filepaths - return getInfoFromUpdateSet(systemUpdateSet) -} +export { registerRobotSystemUpdate } from './handler' diff --git a/app-shell-odd/src/system-update/release-files.ts b/app-shell-odd/src/system-update/release-files.ts deleted file mode 100644 index 6ea57648d05..00000000000 --- a/app-shell-odd/src/system-update/release-files.ts +++ /dev/null @@ -1,148 +0,0 @@ -// functions for downloading and storing release files -import assert from 'assert' -import path from 'path' -import { promisify } from 'util' -import tempy from 'tempy' -import { move, readdir, remove } from 'fs-extra' -import StreamZip from 'node-stream-zip' -import getStream from 'get-stream' - -import { createLogger } from '../log' -import { fetchToFile } from '../http' -import type { DownloadProgress } from '../http' -import type { ReleaseSetUrls, ReleaseSetFilepaths, UserFileInfo } from './types' - -const VERSION_FILENAME = 'VERSION.json' - -const log = createLogger('systemUpdate/release-files') -const outPath = (dir: string, url: string): string => { - return path.join(dir, path.basename(url)) -} - -// checks `directory` for system update files matching the given `urls`, and -// downloads them if they can't be found -export function getReleaseFiles( - urls: ReleaseSetUrls, - directory: string, - onProgress: (progress: DownloadProgress) => unknown -): Promise { - return readdir(directory) - .catch(error => { - log.warn('Error retrieving files from filesystem', { error }) - return [] - }) - .then((files: string[]) => { - log.debug('Files in system update download directory', { files }) - const system = outPath(directory, urls.system) - const releaseNotes = outPath(directory, urls.releaseNotes ?? '') - - // TODO: check for release notes when OT-3 manifest points to real release notes - if (files.some(f => f === path.basename(system))) { - return { system, releaseNotes } - } - - return downloadReleaseFiles(urls, directory, onProgress) - }) -} - -// downloads the entire release set to a temporary directory, and once they're -// all successfully downloaded, renames the directory to `directory` -// TODO(mc, 2019-07-09): DRY this up if/when more than 2 files are required -export function downloadReleaseFiles( - urls: ReleaseSetUrls, - directory: string, - // `onProgress` will be called with download progress as the files are read - onProgress: (progress: DownloadProgress) => unknown -): Promise { - const tempDir: string = tempy.directory() - const tempSystemPath = outPath(tempDir, urls.system) - const tempNotesPath = outPath(tempDir, urls.releaseNotes ?? '') - - log.debug('directory created for robot update downloads', { tempDir }) - - // downloads are streamed directly to the filesystem to avoid loading them - // all into memory simultaneously - const systemReq = fetchToFile(urls.system, tempSystemPath, { onProgress }) - const notesReq = urls.releaseNotes - ? fetchToFile(urls.releaseNotes, tempNotesPath) - : Promise.resolve(null) - - return Promise.all([systemReq, notesReq]).then(results => { - const [systemTemp, releaseNotesTemp] = results - const systemPath = outPath(directory, systemTemp) - const notesPath = releaseNotesTemp - ? 
outPath(directory, releaseNotesTemp) - : null - - log.debug('renaming directory', { from: tempDir, to: directory }) - - return move(tempDir, directory, { overwrite: true }).then(() => ({ - system: systemPath, - releaseNotes: notesPath, - })) - }) -} - -export function readUserFileInfo(systemFile: string): Promise { - const openZip = new Promise((resolve, reject) => { - const zip = new StreamZip({ file: systemFile, storeEntries: true }) - .once('ready', handleReady) - .once('error', handleError) - - function handleReady(): void { - cleanup() - resolve(zip) - } - - function handleError(error: Error): void { - cleanup() - zip.close() - reject(error) - } - - function cleanup(): void { - zip.removeListener('ready', handleReady) - zip.removeListener('error', handleError) - } - }) - - return openZip.then(zip => { - const entries = zip.entries() - const streamFromZip = promisify(zip.stream.bind(zip)) - - assert(VERSION_FILENAME in entries, `${VERSION_FILENAME} not in archive`) - - const result = streamFromZip(VERSION_FILENAME) - // @ts-expect-error(mc, 2021-02-17): stream may be undefined - .then(getStream) - .then(JSON.parse) - .then(versionInfo => ({ - systemFile, - versionInfo, - })) - - result.finally(() => { - zip.close() - }) - - return result - }) -} - -export function cleanupReleaseFiles( - downloadsDir: string, - currentRelease: string -): Promise { - log.debug('deleting release files not part of release ', currentRelease) - - return readdir(downloadsDir, { withFileTypes: true }) - .then(files => { - return ( - files - // eslint-disable-next-line @typescript-eslint/strict-boolean-expressions - .filter(f => f.isDirectory() && f.name !== currentRelease) - .map(f => path.join(downloadsDir, f.name)) - ) - }) - .then(removals => Promise.all(removals.map(f => remove(f)))) -} diff --git a/app-shell-odd/src/system-update/release-manifest.ts b/app-shell-odd/src/system-update/release-manifest.ts deleted file mode 100644 index d27c8a04449..00000000000 --- a/app-shell-odd/src/system-update/release-manifest.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { readJson, outputJson } from 'fs-extra' -import { fetchJson } from '../http' -import { createLogger } from '../log' -import { getManifestCacheDir } from './directories' -import type { ReleaseManifest, ReleaseSetUrls } from './types' - -const log = createLogger('systemUpdate/release-manifest') - -export function getReleaseSet( - manifest: ReleaseManifest, - version: string -): ReleaseSetUrls | null { - return manifest.production[version] ?? 
null -} - -export const getCachedReleaseManifest = (): Promise => - readJson(getManifestCacheDir()) - -export const downloadAndCacheReleaseManifest = ( - manifestUrl: string -): Promise => - fetchJson(manifestUrl) - .then(manifest => { - return outputJson(getManifestCacheDir(), manifest).then(() => manifest) - }) - .catch((error: Error) => { - log.error('Error downloading the release manifest', { error }) - return readJson(getManifestCacheDir()) - }) diff --git a/app-shell-odd/src/system-update/types.ts b/app-shell-odd/src/system-update/types.ts index 8555d980791..12c2f5dc674 100644 --- a/app-shell-odd/src/system-update/types.ts +++ b/app-shell-odd/src/system-update/types.ts @@ -16,24 +16,47 @@ export interface ReleaseSetFilepaths { releaseNotes: string | null } -// shape of VERSION.json in update file -export interface VersionInfo { - buildroot_version: string - buildroot_sha: string - buildroot_branch: string - buildroot_buildid: string - build_type: string - opentrons_api_version: string - opentrons_api_sha: string - opentrons_api_branch: string - update_server_version: string - update_server_sha: string - update_server_branch: string +export interface NoUpdate { + version: null + files: null + releaseNotes: null + downloadProgress: 0 } -export interface UserFileInfo { - // filepath of update file - systemFile: string - // parsed contents of VERSION.json - versionInfo: VersionInfo +export interface FoundUpdate { + version: string + files: null + releaseNotes: null + downloadProgress: number +} + +export interface ReadyUpdate { + version: string + files: ReleaseSetFilepaths + releaseNotes: string | null + downloadProgress: 100 +} + +export type ResolvedUpdate = NoUpdate | ReadyUpdate +export type UnresolvedUpdate = ResolvedUpdate | FoundUpdate +export type ProgressCallback = (status: UnresolvedUpdate) => void + +// Interface provided by the web and usb sourced updaters. Type variable is +// specified by the updater implementation. +export interface UpdateProvider { + // Call before disposing to make sure any temporary storage is removed + teardown: () => Promise + // Scan an implementation-defined location for updates + refreshUpdateCache: (progress: ProgressCallback) => Promise + // Get the details of a found update, if any. 
+ getUpdateDetails: () => UnresolvedUpdate + // Lock the update cache, which will prevent anything from accidentally overwriting stuff + // while it's being sent as an update + lockUpdateCache: () => void + // Reverse lockUpdateCache() + unlockUpdateCache: () => void + // get an identifier for logging + name: () => string + // get the current source + source: () => UpdateSourceDetails } diff --git a/app-shell-odd/src/system-update/update.ts b/app-shell-odd/src/system-update/update.ts deleted file mode 100644 index d1adb6e9c3d..00000000000 --- a/app-shell-odd/src/system-update/update.ts +++ /dev/null @@ -1,25 +0,0 @@ -import { postFile } from '../http' -import type { - RobotModel, - ViewableRobot, -} from '@opentrons/app/src/redux/discovery/types' - -const OT2_FILENAME = 'ot2-system.zip' -const SYSTEM_FILENAME = 'system-update.zip' - -const getSystemFileName = (robotModel: RobotModel): string => { - if (robotModel === 'OT-2 Standard' || robotModel === null) { - return OT2_FILENAME - } - return SYSTEM_FILENAME -} - -export function uploadSystemFile( - robot: ViewableRobot, - urlPath: string, - file: string -): Promise { - const url = `http://${robot.ip}:${robot.port}${urlPath}` - - return postFile(url, getSystemFileName(robot.robotModel), file) -} diff --git a/app-shell-odd/src/system-update/utils.ts b/app-shell-odd/src/system-update/utils.ts new file mode 100644 index 00000000000..e0a334ba5d4 --- /dev/null +++ b/app-shell-odd/src/system-update/utils.ts @@ -0,0 +1,18 @@ +import { rm } from 'fs/promises' +import tempy from 'tempy' + +export const directoryWithCleanup = ( + task: (directory: string) => Promise +): Promise => { + const directory = tempy.directory() + return new Promise((resolve, reject) => + task(directory as string) + .then(result => { + resolve(result) + }) + .catch(err => { + reject(err) + }) + .finally(() => rm(directory as string, { recursive: true, force: true })) + ) +} diff --git a/app-shell-odd/src/system.ts b/app-shell-odd/src/system.ts new file mode 100644 index 00000000000..36c427a7e94 --- /dev/null +++ b/app-shell-odd/src/system.ts @@ -0,0 +1,22 @@ +import { UPDATE_BRIGHTNESS } from './constants' +import { createLogger } from './log' +import systemd from './systemd' + +import type { Action } from './types' + +const log = createLogger('system') + +export function registerUpdateBrightness(): (action: Action) => void { + return function handleAction(action: Action) { + switch (action.type) { + case UPDATE_BRIGHTNESS: + console.log('update the brightness') + systemd + .updateBrightness(action.payload.message) + .catch(err => + log.debug('Something wrong when updating the brightness', err) + ) + break + } + } +} diff --git a/app-shell-odd/src/types.ts b/app-shell-odd/src/types.ts index 2899171a08b..5d8f8a9502a 100644 --- a/app-shell-odd/src/types.ts +++ b/app-shell-odd/src/types.ts @@ -112,11 +112,13 @@ export type CLEAR_CACHE_TYPE = 'discovery:CLEAR_CACHE' export interface ConfigInitializedAction { type: CONFIG_INITIALIZED_TYPE payload: { config: Config } + meta: { shell: true } } export interface ConfigValueUpdatedAction { type: CONFIG_VALUE_UPDATED_TYPE payload: { path: string; value: any } + meta: { shell: true } } export interface StartDiscoveryAction { diff --git a/app-shell-odd/src/update.ts b/app-shell-odd/src/update.ts deleted file mode 100644 index d1ea2f154b3..00000000000 --- a/app-shell-odd/src/update.ts +++ /dev/null @@ -1,113 +0,0 @@ -import semver from 'semver' -import { UI_INITIALIZED, UPDATE_BRIGHTNESS } from './constants' -import { createLogger } from 
'./log' -import { getConfig } from './config' -import { - downloadAndCacheReleaseManifest, - getCachedReleaseManifest, - getReleaseSet, -} from './system-update/release-manifest' -import systemd from './systemd' - -import type { Action, Dispatch } from './types' -import type { ReleaseSetUrls } from './system-update/types' - -const log = createLogger('update') - -const OPENTRONS_PROJECT: string = _OPENTRONS_PROJECT_ - -export const FLEX_MANIFEST_URL = - OPENTRONS_PROJECT && OPENTRONS_PROJECT.includes('robot-stack') - ? 'https://builds.opentrons.com/ot3-oe/releases.json' - : 'https://ot3-development.builds.opentrons.com/ot3-oe/releases.json' - -const PKG_VERSION = _PKG_VERSION_ -let LATEST_OT_SYSTEM_VERSION = PKG_VERSION - -const channelFinder = (version: string, channel: string): boolean => { - // return the latest alpha/beta if a user subscribes to alpha/beta updates - if (['alpha', 'beta'].includes(channel)) { - return version.includes(channel) - } else { - // otherwise get the latest stable version - return !version.includes('alpha') && !version.includes('beta') - } -} - -export const getLatestSystemUpdateUrls = (): Promise => { - return getCachedReleaseManifest() - .then(manifest => getReleaseSet(manifest, getLatestVersion())) - .catch((error: Error) => { - log.warn('Error retrieving release manifest', { - version: getLatestVersion(), - error, - }) - return Promise.reject(error) - }) -} - -export const updateLatestVersion = (): Promise => { - const channel = getConfig('update').channel - - return downloadAndCacheReleaseManifest(FLEX_MANIFEST_URL) - .then(response => { - const latestAvailableVersion = Object.keys(response.production) - .sort((a, b) => { - if (semver.lt(a, b)) { - return 1 - } - return -1 - }) - .find(verson => channelFinder(verson, channel)) - const changed = LATEST_OT_SYSTEM_VERSION !== latestAvailableVersion - LATEST_OT_SYSTEM_VERSION = latestAvailableVersion ?? 
PKG_VERSION - if (changed) { - log.info( - `Update: latest version available from ${FLEX_MANIFEST_URL} is ${latestAvailableVersion}` - ) - } - return LATEST_OT_SYSTEM_VERSION - }) - .catch((e: Error) => { - log.warn( - `Update: error fetching latest system version from ${FLEX_MANIFEST_URL}: ${e.message}, keeping latest version at ${LATEST_OT_SYSTEM_VERSION}` - ) - return LATEST_OT_SYSTEM_VERSION - }) -} - -export const getLatestVersion = (): string => { - return LATEST_OT_SYSTEM_VERSION -} - -export const getCurrentVersion = (): string => PKG_VERSION - -export const isUpdateAvailable = (): boolean => - getLatestVersion() !== getCurrentVersion() - -export function registerUpdate( - dispatch: Dispatch -): (action: Action) => unknown { - return function handleAction(action: Action) { - switch (action.type) { - case UI_INITIALIZED: - case 'shell:CHECK_UPDATE': - return updateLatestVersion() - } - } -} - -export function registerUpdateBrightness(): (action: Action) => unknown { - return function handleAction(action: Action) { - switch (action.type) { - case UPDATE_BRIGHTNESS: - console.log('update the brightness') - systemd - .updateBrightness(action.payload.message) - .catch(err => - log.debug('Something wrong when updating the brightness', err) - ) - break - } - } -} diff --git a/app-shell-odd/src/usb.ts b/app-shell-odd/src/usb.ts index 44252c6a339..1c5e6bd14a7 100644 --- a/app-shell-odd/src/usb.ts +++ b/app-shell-odd/src/usb.ts @@ -2,6 +2,7 @@ import * as fs from 'fs' import * as fsPromises from 'fs/promises' import { join } from 'path' import { flatten } from 'lodash' +import { createLogger } from './log' import { robotMassStorageDeviceAdded, robotMassStorageDeviceEnumerated, @@ -16,7 +17,12 @@ import type { Dispatch, Action } from './types' const FLEX_USB_MOUNT_DIR = '/media/' const FLEX_USB_DEVICE_DIR = '/dev/' -const FLEX_USB_MOUNT_FILTER = /sd[a-z]+[0-9]+$/ +// filter matches sda0, sdc9, sdb +const FLEX_USB_DEVICE_FILTER = /sd[a-z]+[0-9]*$/ +// filter matches sda0, sdc9, sdb, VOLUME-sdc10 +const FLEX_USB_MOUNT_FILTER = /([^/]+-)?(sd[a-z]+[0-9]*)$/ + +const log = createLogger('mass-storage') // These are for backoff algorithm // apply the delay from 1 sec 64 sec @@ -48,11 +54,15 @@ const isWeirdDirectoryAndShouldSkip = (dirName: string): boolean => .map(keyword => dirName.includes(keyword)) .reduce((prev, current) => prev || current, false) -const enumerateMassStorage = (path: string): Promise => { +const doEnumerateMassStorage = ( + path: string, + depth: number +): Promise => { + log.info(`Enumerating mass storage path ${path}`) return callWithRetry(() => fsPromises.readdir(path).then(entries => { - if (entries.length === 0) { - throw new Error('No entries found, retrying...') + if (entries.length === 0 && depth === 0) { + throw new Error('No entries found for top level, retrying...') } return entries }) @@ -62,29 +72,44 @@ const enumerateMassStorage = (path: string): Promise => { Promise.all( entries.map(entry => entry.isDirectory() && !isWeirdDirectoryAndShouldSkip(entry.name) - ? enumerateMassStorage(join(path, entry.name)) + ? 
doEnumerateMassStorage(join(path, entry.name), depth + 1) : new Promise(resolve => { resolve([join(path, entry.name)]) }) ) ) ) - .catch(error => { - console.error(`Error enumerating mass storage: ${error}`) + .catch((error: Error) => { + log.error( + `Error enumerating mass storage path ${path}: ${error.name}: ${error.message}` + ) return [] }) .then(flatten) - .then(result => { - return result - }) + .then(result => result) +} + +const enumerateMassStorage = (path: string): Promise => { + log.info(`Beginning scan of mass storage device at ${path}`) + return doEnumerateMassStorage(path, 0).then(results => { + log.info(`Found ${results.length} files in ${path}`) + return results + }) } + export function watchForMassStorage(dispatch: Dispatch): () => void { - console.log('watching for mass storage') + log.info('watching for mass storage') let prevDirs: string[] = [] const handleNewlyPresent = (path: string): Promise => { dispatch(robotMassStorageDeviceAdded(path)) return enumerateMassStorage(path) .then(contents => { + log.debug( + `mass storage device at ${path} enumerated: ${JSON.stringify( + contents + )}` + ) + log.info(`Enumerated ${path} with ${contents.length} results`) dispatch(robotMassStorageDeviceEnumerated(path, contents)) }) .then(() => path) @@ -101,6 +126,9 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { const newlyAbsent = prevDirs.filter( entry => !sortedEntries.includes(entry) ) + log.info( + `rescan: newly present: ${newlyPresent} newly absent: ${newlyAbsent}` + ) return Promise.all([ ...newlyAbsent.map(entry => { if (entry.match(FLEX_USB_MOUNT_FILTER)) { @@ -119,6 +147,7 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { ]) }) .then(present => { + log.info(`now present: ${present}`) prevDirs = present.filter((entry): entry is string => entry !== null) }) @@ -133,6 +162,9 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { return } if (!fileName.match(FLEX_USB_MOUNT_FILTER)) { + log.debug( + `mediaWatcher: filename ${fileName} does not match ${FLEX_USB_MOUNT_FILTER}` + ) return } const fullPath = join(FLEX_USB_MOUNT_DIR, fileName) @@ -140,25 +172,36 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { .stat(fullPath) .then(info => { if (!info.isDirectory) { + log.debug(`mediaWatcher: ${fullPath} is not a directory`) return } if (prevDirs.includes(fullPath)) { + log.debug(`mediaWatcher: ${fullPath} is known`) return } - console.log(`New mass storage device ${fileName} detected`) + log.info(`New mass storage device ${fileName} detected`) prevDirs.push(fullPath) return handleNewlyPresent(fullPath) }) - .catch(() => { + .catch(err => { if (prevDirs.includes(fullPath)) { - console.log(`Mass storage device at ${fileName} removed`) + log.info( + `Mass storage device at ${fileName} removed because its mount point disappeared`, + err + ) prevDirs = prevDirs.filter(entry => entry !== fullPath) dispatch(robotMassStorageDeviceRemoved(fullPath)) + } else { + log.debug( + `Mass storage device candidate mountpoint at ${fileName} disappeared`, + err + ) } }) } ) } catch { + log.error(`Failed to start watcher for ${FLEX_USB_MOUNT_DIR}`) return null } } @@ -170,21 +213,42 @@ export function watchForMassStorage(dispatch: Dispatch): () => void { { persistent: true }, (event, fileName) => { if (!!!fileName) return - if (!fileName.match(FLEX_USB_MOUNT_FILTER)) return - const fullPath = join(FLEX_USB_DEVICE_DIR, fileName) - const mountPath = join(FLEX_USB_MOUNT_DIR, fileName) - 
fsPromises.stat(fullPath).catch(() => { - if (prevDirs.includes(mountPath)) { - console.log(`Mass storage device at ${fileName} removed`) - prevDirs = prevDirs.filter(entry => entry !== mountPath) - dispatch( - robotMassStorageDeviceRemoved(join(FLEX_USB_MOUNT_DIR, fileName)) + if (!fileName.match(FLEX_USB_DEVICE_FILTER)) return + if (event !== 'rename') { + log.debug( + `devWatcher: ignoring ${event} event for ${fileName} (not rename)` + ) + return + } + log.debug(`devWatcher: ${event} event for ${fileName}`) + fsPromises + .readdir(FLEX_USB_DEVICE_DIR) + .then(contents => { + if (contents.includes(fileName)) { + log.debug( + `devWatcher: ${fileName} found in /dev, this is an attach` + ) + // this is an attach + return + } + const prevDir = prevDirs.filter(dir => dir.includes(fileName)).at(0) + log.debug( + `devWatcher: ${fileName} not in /dev, this is a remove, previously mounted at ${prevDir}` ) - // we don't care if this fails because it's racing the system removing - // the mount dir in the common case - fsPromises.unlink(mountPath).catch(() => {}) - } - }) + if (prevDir != null) { + log.info(`Mass storage device at ${fileName} removed`) + prevDirs = prevDirs.filter(entry => entry !== prevDir) + dispatch(robotMassStorageDeviceRemoved(prevDir)) + // we don't care if this fails because it's racing the system removing + // the mount dir in the common case + fsPromises.unlink(prevDir).catch(() => {}) + } + }) + .catch(err => { + log.info( + `Failed to handle mass storage device ${fileName}: ${err.name}: ${err.message}` + ) + }) } ) diff --git a/app-shell/src/config/actions.ts b/app-shell/src/config/actions.ts index eabc9b47a16..5d96e6c1171 100644 --- a/app-shell/src/config/actions.ts +++ b/app-shell/src/config/actions.ts @@ -111,6 +111,7 @@ import type { export const configInitialized = (config: Config): ConfigInitializedAction => ({ type: CONFIG_INITIALIZED, payload: { config }, + meta: { shell: true }, }) // config value has been updated @@ -120,6 +121,7 @@ export const configValueUpdated = ( ): ConfigValueUpdatedAction => ({ type: VALUE_UPDATED, payload: { path, value }, + meta: { shell: true }, }) export const customLabwareList = ( diff --git a/app-shell/src/main.ts b/app-shell/src/main.ts index ef422a455cc..0f4ab41733b 100644 --- a/app-shell/src/main.ts +++ b/app-shell/src/main.ts @@ -18,7 +18,6 @@ import { registerProtocolStorage } from './protocol-storage' import { getConfig, getStore, getOverrides, registerConfig } from './config' import { registerUsb } from './usb' import { registerNotify, closeAllNotifyConnections } from './notifications' - import type { BrowserWindow } from 'electron' import type { Action, Dispatch, Logger } from './types' import type { LogEntry } from 'winston' diff --git a/app-shell/src/types.ts b/app-shell/src/types.ts index 8a1bea51a20..f608b4512af 100644 --- a/app-shell/src/types.ts +++ b/app-shell/src/types.ts @@ -96,9 +96,11 @@ export type CLEAR_CACHE_TYPE = 'discovery:CLEAR_CACHE' export interface ConfigInitializedAction { type: CONFIG_INITIALIZED_TYPE payload: { config: Config } + meta: { shell: true } } export interface ConfigValueUpdatedAction { type: CONFIG_VALUE_UPDATED_TYPE payload: { path: string; value: any } + meta: { shell: true } } diff --git a/app/src/assets/localization/en/run_details.json b/app/src/assets/localization/en/run_details.json index e9f39f81d06..28df0734619 100644 --- a/app/src/assets/localization/en/run_details.json +++ b/app/src/assets/localization/en/run_details.json @@ -31,9 +31,11 @@ "custom_values": "Custom values", 
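// --- Reviewer sketch (not part of the patch): behaviour of the USB filters ---
// Refers back to the filters introduced in app-shell-odd/src/usb.ts above; the
// regexes are copied verbatim from the diff and the constant names match it.
const FLEX_USB_DEVICE_FILTER = /sd[a-z]+[0-9]*$/
const FLEX_USB_MOUNT_FILTER = /([^/]+-)?(sd[a-z]+[0-9]*)$/

console.assert(FLEX_USB_DEVICE_FILTER.test('sdb')) // whole-disk node, no partition digit
console.assert(FLEX_USB_DEVICE_FILTER.test('sda1')) // partition node
console.assert(FLEX_USB_MOUNT_FILTER.test('sda1')) // bare mount-dir name
console.assert(FLEX_USB_MOUNT_FILTER.test('VOLUME-sdc10')) // labelled mount-dir name
console.assert(!FLEX_USB_MOUNT_FILTER.test('sda1/extra')) // nested paths do not match
// --- end sketch ---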
"data_out_of_date": "This data is likely out of date", "date": "Date", + "device_details": "Device details", "door_is_open": "Robot door is open", "door_open_pause": "Current Step - Paused - Door Open", "download": "Download", + "download_files": "Download files", "download_run_log": "Download run log", "downloading_run_log": "Downloading run log", "drop_tip": "Dropping tip in {{well_name}} of {{labware}} in {{labware_location}}", @@ -45,6 +47,7 @@ "error_info": "Error {{errorCode}}: {{errorType}}", "error_type": "Error: {{errorType}}", "failed_step": "Failed step", + "files_available_robot_details": "All files associated with the protocol run are available on the robot detail screen.", "final_step": "Final Step", "ignore_stored_data": "Ignore stored data", "labware": "labware", diff --git a/app/src/local-resources/instruments/hooks.ts b/app/src/local-resources/instruments/hooks.ts deleted file mode 100644 index 713dd6f1c83..00000000000 --- a/app/src/local-resources/instruments/hooks.ts +++ /dev/null @@ -1,80 +0,0 @@ -import { - getGripperDisplayName, - getPipetteModelSpecs, - getPipetteNameSpecs, - getPipetteSpecsV2, - GRIPPER_MODELS, -} from '@opentrons/shared-data' -import { useIsOEMMode } from '/app/resources/robot-settings/hooks' - -import type { - GripperModel, - PipetteModel, - PipetteModelSpecs, - PipetteName, - PipetteNameSpecs, - PipetteV2Specs, -} from '@opentrons/shared-data' - -export function usePipetteNameSpecs( - name: PipetteName -): PipetteNameSpecs | null { - const isOEMMode = useIsOEMMode() - const pipetteNameSpecs = getPipetteNameSpecs(name) - - if (pipetteNameSpecs == null) return null - - const brandedDisplayName = pipetteNameSpecs.displayName - const anonymizedDisplayName = pipetteNameSpecs.displayName.replace( - 'Flex ', - '' - ) - - const displayName = isOEMMode ? anonymizedDisplayName : brandedDisplayName - - return { ...pipetteNameSpecs, displayName } -} - -export function usePipetteModelSpecs( - model: PipetteModel -): PipetteModelSpecs | null { - const modelSpecificFields = getPipetteModelSpecs(model) - const pipetteNameSpecs = usePipetteNameSpecs( - modelSpecificFields?.name as PipetteName - ) - - if (modelSpecificFields == null || pipetteNameSpecs == null) return null - - return { ...modelSpecificFields, displayName: pipetteNameSpecs.displayName } -} - -export function usePipetteSpecsV2( - name?: PipetteName | PipetteModel -): PipetteV2Specs | null { - const isOEMMode = useIsOEMMode() - const pipetteSpecs = getPipetteSpecsV2(name) - - if (pipetteSpecs == null) return null - - const brandedDisplayName = pipetteSpecs.displayName - const anonymizedDisplayName = pipetteSpecs.displayName.replace('Flex ', '') - - const displayName = isOEMMode ? anonymizedDisplayName : brandedDisplayName - - return { ...pipetteSpecs, displayName } -} - -export function useGripperDisplayName(gripperModel: GripperModel): string { - const isOEMMode = useIsOEMMode() - - let brandedDisplayName = '' - - // check to only call display name helper for a gripper model - if (GRIPPER_MODELS.includes(gripperModel)) { - brandedDisplayName = getGripperDisplayName(gripperModel) - } - - const anonymizedDisplayName = brandedDisplayName.replace('Flex ', '') - - return isOEMMode ? 
anonymizedDisplayName : brandedDisplayName -} diff --git a/app/src/local-resources/instruments/hooks/index.ts b/app/src/local-resources/instruments/hooks/index.ts new file mode 100644 index 00000000000..6cfd0af2293 --- /dev/null +++ b/app/src/local-resources/instruments/hooks/index.ts @@ -0,0 +1,5 @@ +export * from './useGripperDisplayName' +export * from './useHomePipettes' +export * from './usePipetteModelSpecs' +export * from './usePipetteNameSpecs' +export * from './usePipetteSpecsv2' diff --git a/app/src/local-resources/instruments/hooks/useGripperDisplayName.ts b/app/src/local-resources/instruments/hooks/useGripperDisplayName.ts new file mode 100644 index 00000000000..fd1b8262a79 --- /dev/null +++ b/app/src/local-resources/instruments/hooks/useGripperDisplayName.ts @@ -0,0 +1,19 @@ +import { getGripperDisplayName, GRIPPER_MODELS } from '@opentrons/shared-data' +import { useIsOEMMode } from '/app/resources/robot-settings' + +import type { GripperModel } from '@opentrons/shared-data' + +export function useGripperDisplayName(gripperModel: GripperModel): string { + const isOEMMode = useIsOEMMode() + + let brandedDisplayName = '' + + // check to only call display name helper for a gripper model + if (GRIPPER_MODELS.includes(gripperModel)) { + brandedDisplayName = getGripperDisplayName(gripperModel) + } + + const anonymizedDisplayName = brandedDisplayName.replace('Flex ', '') + + return isOEMMode ? anonymizedDisplayName : brandedDisplayName +} diff --git a/app/src/organisms/DropTipWizardFlows/hooks/useHomePipettes.ts b/app/src/local-resources/instruments/hooks/useHomePipettes.ts similarity index 90% rename from app/src/organisms/DropTipWizardFlows/hooks/useHomePipettes.ts rename to app/src/local-resources/instruments/hooks/useHomePipettes.ts index c0e58ef5bb5..da139c14651 100644 --- a/app/src/organisms/DropTipWizardFlows/hooks/useHomePipettes.ts +++ b/app/src/local-resources/instruments/hooks/useHomePipettes.ts @@ -1,12 +1,13 @@ import { useRobotControlCommands } from '/app/resources/maintenance_runs' import type { CreateCommand } from '@opentrons/shared-data' + import type { UseRobotControlCommandsProps, UseRobotControlCommandsResult, } from '/app/resources/maintenance_runs' -interface UseHomePipettesResult { +export interface UseHomePipettesResult { isHoming: UseRobotControlCommandsResult['isExecuting'] homePipettes: UseRobotControlCommandsResult['executeCommands'] } @@ -15,7 +16,7 @@ export type UseHomePipettesProps = Pick< UseRobotControlCommandsProps, 'pipetteInfo' | 'onSettled' > -// TODO(jh, 09-12-24): Find a better place for this hook to live. + // Home pipettes except for plungers. 
export function useHomePipettes( props: UseHomePipettesProps diff --git a/app/src/local-resources/instruments/hooks/usePipetteModelSpecs.ts b/app/src/local-resources/instruments/hooks/usePipetteModelSpecs.ts new file mode 100644 index 00000000000..afbc2f205fa --- /dev/null +++ b/app/src/local-resources/instruments/hooks/usePipetteModelSpecs.ts @@ -0,0 +1,24 @@ +import { getPipetteModelSpecs } from '@opentrons/shared-data' + +import { usePipetteNameSpecs } from './usePipetteNameSpecs' + +import type { + PipetteModel, + PipetteModelSpecs, + PipetteName, +} from '@opentrons/shared-data' + +export function usePipetteModelSpecs( + model: PipetteModel +): PipetteModelSpecs | null { + const modelSpecificFields = getPipetteModelSpecs(model) + const pipetteNameSpecs = usePipetteNameSpecs( + modelSpecificFields?.name as PipetteName + ) + + if (modelSpecificFields == null || pipetteNameSpecs == null) { + return null + } + + return { ...modelSpecificFields, displayName: pipetteNameSpecs.displayName } +} diff --git a/app/src/local-resources/instruments/hooks/usePipetteNameSpecs.ts b/app/src/local-resources/instruments/hooks/usePipetteNameSpecs.ts new file mode 100644 index 00000000000..85a29b2fef7 --- /dev/null +++ b/app/src/local-resources/instruments/hooks/usePipetteNameSpecs.ts @@ -0,0 +1,26 @@ +import { getPipetteNameSpecs } from '@opentrons/shared-data' + +import { useIsOEMMode } from '/app/resources/robot-settings' + +import type { PipetteName, PipetteNameSpecs } from '@opentrons/shared-data' + +export function usePipetteNameSpecs( + name: PipetteName +): PipetteNameSpecs | null { + const isOEMMode = useIsOEMMode() + const pipetteNameSpecs = getPipetteNameSpecs(name) + + if (pipetteNameSpecs == null) { + return null + } + + const brandedDisplayName = pipetteNameSpecs.displayName + const anonymizedDisplayName = pipetteNameSpecs.displayName.replace( + 'Flex ', + '' + ) + + const displayName = isOEMMode ? anonymizedDisplayName : brandedDisplayName + + return { ...pipetteNameSpecs, displayName } +} diff --git a/app/src/local-resources/instruments/hooks/usePipetteSpecsv2.ts b/app/src/local-resources/instruments/hooks/usePipetteSpecsv2.ts new file mode 100644 index 00000000000..951c1d857f1 --- /dev/null +++ b/app/src/local-resources/instruments/hooks/usePipetteSpecsv2.ts @@ -0,0 +1,27 @@ +import { getPipetteSpecsV2 } from '@opentrons/shared-data' + +import { useIsOEMMode } from '/app/resources/robot-settings' + +import type { + PipetteModel, + PipetteName, + PipetteV2Specs, +} from '@opentrons/shared-data' + +export function usePipetteSpecsV2( + name?: PipetteName | PipetteModel +): PipetteV2Specs | null { + const isOEMMode = useIsOEMMode() + const pipetteSpecs = getPipetteSpecsV2(name) + + if (pipetteSpecs == null) { + return null + } + + const brandedDisplayName = pipetteSpecs.displayName + const anonymizedDisplayName = pipetteSpecs.displayName.replace('Flex ', '') + + const displayName = isOEMMode ? 
anonymizedDisplayName : brandedDisplayName + + return { ...pipetteSpecs, displayName } +} diff --git a/app/src/local-resources/labware/utils/__tests__/getLabwareDisplayLocation.test.tsx b/app/src/local-resources/labware/utils/__tests__/getLabwareDisplayLocation.test.tsx new file mode 100644 index 00000000000..22e02478ded --- /dev/null +++ b/app/src/local-resources/labware/utils/__tests__/getLabwareDisplayLocation.test.tsx @@ -0,0 +1,173 @@ +import { describe, it, expect, vi } from 'vitest' +import { screen } from '@testing-library/react' +import { useTranslation } from 'react-i18next' + +import { + FLEX_ROBOT_TYPE, + getModuleDisplayName, + getModuleType, + getOccludedSlotCountForModule, + getLabwareDefURI, + getLabwareDisplayName, +} from '@opentrons/shared-data' + +import { renderWithProviders } from '/app/__testing-utils__' +import { i18n } from '/app/i18n' +import { getLabwareDisplayLocation } from '/app/local-resources/labware' +import { + getModuleModel, + getModuleDisplayLocation, +} from '/app/local-resources/modules' + +import type { ComponentProps } from 'react' +import type { LabwareLocation } from '@opentrons/shared-data' + +vi.mock('@opentrons/shared-data', async () => { + const actual = await vi.importActual('@opentrons/shared-data') + return { + ...actual, + getModuleDisplayName: vi.fn(), + getModuleType: vi.fn(), + getOccludedSlotCountForModule: vi.fn(), + getLabwareDefURI: vi.fn(), + getLabwareDisplayName: vi.fn(), + } +}) + +vi.mock('/app/local-resources/modules', () => ({ + getModuleModel: vi.fn(), + getModuleDisplayLocation: vi.fn(), +})) + +const TestWrapper = ({ + location, + params, +}: { + location: LabwareLocation | null + params: any +}) => { + const { t } = useTranslation('protocol_command_text') + const displayLocation = getLabwareDisplayLocation({ ...params, location, t }) + return

<div>{displayLocation}</div>
+} + +const render = (props: ComponentProps) => { + return renderWithProviders(, { + i18nInstance: i18n, + })[0] +} + +describe('getLabwareDisplayLocation with translations', () => { + const defaultParams = { + loadedLabwares: [], + loadedModules: [], + robotType: FLEX_ROBOT_TYPE, + allRunDefs: [], + } + + it('should return an empty string for null location', () => { + render({ location: null, params: defaultParams }) + expect(screen.queryByText(/.+/)).toBeNull() + }) + + it('should return "off deck" for offDeck location', () => { + render({ location: 'offDeck', params: defaultParams }) + + screen.getByText('off deck') + }) + + it('should return a slot name for slot location', () => { + render({ location: { slotName: 'A1' }, params: defaultParams }) + + screen.getByText('Slot A1') + }) + + it('should return an addressable area name for an addressable area location', () => { + render({ location: { addressableAreaName: 'B2' }, params: defaultParams }) + + screen.getByText('Slot B2') + }) + + it('should return a module location for a module location', () => { + const mockModuleModel = 'temperatureModuleV2' + vi.mocked(getModuleModel).mockReturnValue(mockModuleModel) + vi.mocked(getModuleDisplayLocation).mockReturnValue('3') + vi.mocked(getModuleDisplayName).mockReturnValue('Temperature Module') + vi.mocked(getModuleType).mockReturnValue('temperatureModuleType') + vi.mocked(getOccludedSlotCountForModule).mockReturnValue(1) + + render({ location: { moduleId: 'temp123' }, params: defaultParams }) + + screen.getByText('Temperature Module in Slot 3') + }) + + it('should return an adapter location for an adapter location', () => { + const mockLoadedLabwares = [ + { + id: 'adapter123', + definitionUri: 'adapter-uri', + location: { slotName: 'D1' }, + }, + ] + const mockAllRunDefs = [ + { uri: 'adapter-uri', metadata: { displayName: 'Mock Adapter' } }, + ] + vi.mocked(getLabwareDefURI).mockReturnValue('adapter-uri') + vi.mocked(getLabwareDisplayName).mockReturnValue('Mock Adapter') + + render({ + location: { labwareId: 'adapter123' }, + params: { + ...defaultParams, + loadedLabwares: mockLoadedLabwares, + allRunDefs: mockAllRunDefs, + detailLevel: 'full', + }, + }) + + screen.getByText('Mock Adapter in D1') + }) + + it('should return a slot-only location when detailLevel is "slot-only"', () => { + render({ + location: { slotName: 'C1' }, + params: { ...defaultParams, detailLevel: 'slot-only' }, + }) + + screen.getByText('Slot C1') + }) + + it('should handle an adapter on module location when the detail level is full', () => { + const mockLoadedLabwares = [ + { + id: 'adapter123', + definitionUri: 'adapter-uri', + location: { moduleId: 'temp123' }, + }, + ] + const mockLoadedModules = [{ id: 'temp123', model: 'temperatureModuleV2' }] + const mockAllRunDefs = [ + { uri: 'adapter-uri', metadata: { displayName: 'Mock Adapter' } }, + ] + + vi.mocked(getLabwareDefURI).mockReturnValue('adapter-uri') + vi.mocked(getLabwareDisplayName).mockReturnValue('Mock Adapter') + vi.mocked(getModuleDisplayLocation).mockReturnValue('2') + vi.mocked(getModuleDisplayName).mockReturnValue('Temperature Module') + vi.mocked(getModuleType).mockReturnValue('temperatureModuleType') + vi.mocked(getOccludedSlotCountForModule).mockReturnValue(1) + + render({ + location: { labwareId: 'adapter123' }, + params: { + ...defaultParams, + loadedLabwares: mockLoadedLabwares, + loadedModules: mockLoadedModules, + allRunDefs: mockAllRunDefs, + detailLevel: 'full', + }, + }) + + screen.getByText('Mock Adapter on Temperature Module in 2') + }) 
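// --- Reviewer sketch (not part of the patch): the new detailLevel option ---
// Using the fixtures from the adapter test above (adapter 'adapter123' loaded in
// slot D1 with display name 'Mock Adapter'), the two detail levels would yield:
//
//   getLabwareDisplayLocation({ ...params, location: { labwareId: 'adapter123' }, detailLevel: 'full' })
//   // -> "Mock Adapter in D1" (copy includes the nesting adapter)
//
//   getLabwareDisplayLocation({ ...params, location: { labwareId: 'adapter123' }, detailLevel: 'slot-only' })
//   // -> "Slot D1" (recurses to the adapter's own slot)
//
// Here `params` stands for the loadedLabwares/loadedModules/allRunDefs/robotType/t
// values shown in these tests; the exact copy depends on the i18n strings.
// --- end sketch ---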
+}) diff --git a/app/src/local-resources/labware/utils/getLabwareDisplayLocation.ts b/app/src/local-resources/labware/utils/getLabwareDisplayLocation.ts index 3f56eb165b1..d70e6d19d42 100644 --- a/app/src/local-resources/labware/utils/getLabwareDisplayLocation.ts +++ b/app/src/local-resources/labware/utils/getLabwareDisplayLocation.ts @@ -13,37 +13,57 @@ import { import type { TFunction } from 'i18next' import type { - RobotType, - LabwareLocation, LabwareDefinition2, + LabwareLocation, + RobotType, } from '@opentrons/shared-data' -import type { LoadedModules } from '/app/local-resources/modules' import type { LoadedLabwares } from '/app/local-resources/labware' +import type { LoadedModules } from '/app/local-resources/modules' -export interface UseLabwareDisplayLocationProps { +interface LabwareDisplayLocationBaseParams { location: LabwareLocation | null loadedModules: LoadedModules loadedLabwares: LoadedLabwares - allRunDefs: LabwareDefinition2[] robotType: RobotType t: TFunction isOnDevice?: boolean } -export function getLabwareDisplayLocation({ - loadedLabwares, - loadedModules, - allRunDefs, - location, - robotType, - t, - isOnDevice = false, -}: UseLabwareDisplayLocationProps): string { +export interface LabwareDisplayLocationSlotOnly + extends LabwareDisplayLocationBaseParams { + detailLevel: 'slot-only' +} + +export interface LabwareDisplayLocationFull + extends LabwareDisplayLocationBaseParams { + detailLevel?: 'full' + allRunDefs: LabwareDefinition2[] +} + +export type LabwareDisplayLocationParams = + | LabwareDisplayLocationSlotOnly + | LabwareDisplayLocationFull + +// detailLevel applies to nested labware. If 'full', return copy that includes the actual peripheral that nests the +// labware, ex, "in module XYZ in slot C1". +// If 'slot-only', return only the slot name, ex "in slot C1". +export function getLabwareDisplayLocation( + params: LabwareDisplayLocationParams +): string { + const { + loadedLabwares, + loadedModules, + location, + robotType, + t, + isOnDevice = false, + detailLevel = 'full', + } = params + if (location == null) { - console.warn('Cannot get labware display location. No location provided.') + console.error('Cannot get labware display location. No location provided.') return '' - } - if (location === 'offDeck') { + } else if (location === 'offDeck') { return t('off_deck') } else if ('slotName' in location) { return isOnDevice @@ -56,88 +76,105 @@ export function getLabwareDisplayLocation({ } else if ('moduleId' in location) { const moduleModel = getModuleModel(loadedModules, location.moduleId) if (moduleModel == null) { - console.warn('labware is located on an unknown module model') + console.error('labware is located on an unknown module model') return '' - } else { - const slotName = getModuleDisplayLocation( - loadedModules, - location.moduleId - ) - return isOnDevice - ? `${getModuleDisplayName(moduleModel)}, ${slotName}` - : t('module_in_slot', { - count: getOccludedSlotCountForModule( - getModuleType(moduleModel), - robotType - ), - module: getModuleDisplayName(moduleModel), - slot_name: slotName, - }) } + const slotName = getModuleDisplayLocation(loadedModules, location.moduleId) + + if (detailLevel === 'slot-only') { + return t('slot', { slot_name: slotName }) + } + + return isOnDevice + ? 
`${getModuleDisplayName(moduleModel)}, ${slotName}` + : t('module_in_slot', { + count: getOccludedSlotCountForModule( + getModuleType(moduleModel), + robotType + ), + module: getModuleDisplayName(moduleModel), + slot_name: slotName, + }) } else if ('labwareId' in location) { if (!Array.isArray(loadedLabwares)) { - console.warn('Cannot get display location from loaded labwares object') + console.error('Cannot get display location from loaded labwares object') return '' } const adapter = loadedLabwares.find(lw => lw.id === location.labwareId) - const adapterDef = allRunDefs.find( - def => getLabwareDefURI(def) === adapter?.definitionUri - ) - const adapterDisplayName = - adapterDef != null ? getLabwareDisplayName(adapterDef) : '' if (adapter == null) { - console.warn('labware is located on an unknown adapter') + console.error('labware is located on an unknown adapter') return '' - } else if (adapter.location === 'offDeck') { - return t('off_deck') - } else if ('slotName' in adapter.location) { - return t('adapter_in_slot', { - adapter: adapterDisplayName, - slot: adapter.location.slotName, + } else if (detailLevel === 'slot-only') { + return getLabwareDisplayLocation({ + ...params, + location: adapter.location, }) - } else if ('addressableAreaName' in adapter.location) { - return t('adapter_in_slot', { - adapter: adapterDisplayName, - slot: adapter.location.addressableAreaName, - }) - } else if ('moduleId' in adapter.location) { - const moduleIdUnderAdapter = adapter.location.moduleId + } else if (detailLevel === 'full') { + const { allRunDefs } = params as LabwareDisplayLocationFull + const adapterDef = allRunDefs.find( + def => getLabwareDefURI(def) === adapter?.definitionUri + ) + const adapterDisplayName = + adapterDef != null ? getLabwareDisplayName(adapterDef) : '' - if (!Array.isArray(loadedModules)) { - console.warn('Cannot get display location from loaded labwares object') - return '' - } + if (adapter.location === 'offDeck') { + return t('off_deck') + } else if ( + 'slotName' in adapter.location || + 'addressableAreaName' in adapter.location + ) { + const slotName = + 'slotName' in adapter.location + ? adapter.location.slotName + : adapter.location.addressableAreaName + return t('adapter_in_slot', { + adapter: adapterDisplayName, + slot: slotName, + }) + } else if ('moduleId' in adapter.location) { + const moduleIdUnderAdapter = adapter.location.moduleId + + if (!Array.isArray(loadedModules)) { + console.error( + 'Cannot get display location from loaded modules object' + ) + return '' + } + + const moduleModel = loadedModules.find( + module => module.id === moduleIdUnderAdapter + )?.model + if (moduleModel == null) { + console.error('labware is located on an adapter on an unknown module') + return '' + } + const slotName = getModuleDisplayLocation( + loadedModules, + adapter.location.moduleId + ) - const moduleModel = loadedModules.find( - module => module.id === moduleIdUnderAdapter - )?.model - if (moduleModel == null) { - console.warn('labware is located on an adapter on an unknown module') + return t('adapter_in_mod_in_slot', { + count: getOccludedSlotCountForModule( + getModuleType(moduleModel), + robotType + ), + module: getModuleDisplayName(moduleModel), + adapter: adapterDisplayName, + slot: slotName, + }) + } else { + console.error( + 'Unhandled adapter location for determining display location.' 
+ ) return '' } - const slotName = getModuleDisplayLocation( - loadedModules, - adapter.location.moduleId - ) - return t('adapter_in_mod_in_slot', { - count: getOccludedSlotCountForModule( - getModuleType(moduleModel), - robotType - ), - module: getModuleDisplayName(moduleModel), - adapter: adapterDisplayName, - slot: slotName, - }) } else { - console.warn( - 'display location on adapter could not be established: ', - location - ) + console.error('Unhandled detail level for determining display location.') return '' } } else { - console.warn('display location could not be established: ', location) + console.error('display location could not be established: ', location) return '' } } diff --git a/app/src/organisms/Desktop/Devices/ChangePipette/InstructionStep.tsx b/app/src/organisms/Desktop/Devices/ChangePipette/InstructionStep.tsx index 05d43fdd11c..5b6338be6a5 100644 --- a/app/src/organisms/Desktop/Devices/ChangePipette/InstructionStep.tsx +++ b/app/src/organisms/Desktop/Devices/ChangePipette/InstructionStep.tsx @@ -30,13 +30,13 @@ export function InstructionStep(props: Props): JSX.Element { const display = displayCategory === 'GEN2' ? new URL( - `/app/assets/images/change-pip/${direction}-${String( + `../../../../assets/images/change-pip/${direction}-${String( mount )}-${channelsKey}-GEN2-${diagram}@3x.png`, import.meta.url ).href : new URL( - `/app/assets/images/change-pip/${direction}-${String( + `../../../../assets/images/change-pip/${direction}-${String( mount )}-${channelsKey}-${diagram}@3x.png`, import.meta.url diff --git a/app/src/organisms/Desktop/Devices/ChangePipette/LevelPipette.tsx b/app/src/organisms/Desktop/Devices/ChangePipette/LevelPipette.tsx index db49a4d6861..fb1120daec7 100644 --- a/app/src/organisms/Desktop/Devices/ChangePipette/LevelPipette.tsx +++ b/app/src/organisms/Desktop/Devices/ChangePipette/LevelPipette.tsx @@ -26,6 +26,11 @@ export function LevelingVideo(props: { mount: Mount }): JSX.Element { const { pipetteName, mount } = props + const video = new URL( + `../../../../assets/videos/pip-leveling/${pipetteName}-${mount}.webm`, + import.meta.url + ).href + return ( ) } diff --git a/app/src/organisms/Desktop/Devices/HistoricalProtocolRunDrawer.tsx b/app/src/organisms/Desktop/Devices/HistoricalProtocolRunDrawer.tsx index 6533895bb1e..1570d560aac 100644 --- a/app/src/organisms/Desktop/Devices/HistoricalProtocolRunDrawer.tsx +++ b/app/src/organisms/Desktop/Devices/HistoricalProtocolRunDrawer.tsx @@ -56,6 +56,10 @@ export function HistoricalProtocolRunDrawer( return acc }, []) : [] + if ('outputFileIds' in run && run.outputFileIds.length > 0) { + runDataFileIds.push(...run.outputFileIds) + } + const uniqueLabwareOffsets = allLabwareOffsets?.filter( (offset, index, array) => { return ( diff --git a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderBannerContainer/index.tsx b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderBannerContainer/index.tsx index e05a11eb391..5c7c6e01621 100644 --- a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderBannerContainer/index.tsx +++ b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderBannerContainer/index.tsx @@ -1,6 +1,19 @@ import { useTranslation } from 'react-i18next' +import { useNavigate } from 'react-router-dom' -import { Box, SPACING, Banner } from '@opentrons/components' +import { + Box, + StyledText, + Link, + SPACING, + Banner, + Flex, + DIRECTION_COLUMN, + JUSTIFY_SPACE_BETWEEN, + DIRECTION_ROW, + ALIGN_CENTER, + 
TEXT_DECORATION_UNDERLINE, +} from '@opentrons/components' import { ProtocolAnalysisErrorBanner } from './ProtocolAnalysisErrorBanner' import { @@ -21,17 +34,25 @@ export type RunHeaderBannerContainerProps = ProtocolRunHeaderProps & { isResetRunLoading: boolean runErrors: UseRunErrorsResult runHeaderModalContainerUtils: UseRunHeaderModalContainerResult + hasDownloadableFiles: boolean } // Holds all the various banners that render in ProtocolRunHeader. export function RunHeaderBannerContainer( props: RunHeaderBannerContainerProps ): JSX.Element | null { - const { runStatus, enteredER, runHeaderModalContainerUtils } = props + const navigate = useNavigate() + const { + runStatus, + enteredER, + runHeaderModalContainerUtils, + hasDownloadableFiles, + robotName, + } = props const { analysisErrorModalUtils } = runHeaderModalContainerUtils const { t } = useTranslation(['run_details', 'shared']) - const isDoorOpen = useIsDoorOpen(props.robotName) + const isDoorOpen = useIsDoorOpen(robotName) const { showRunCanceledBanner, @@ -73,6 +94,36 @@ export function RunHeaderBannerContainer( {...props} /> ) : null} + {hasDownloadableFiles ? ( + + + + + {t('download_files')} + + + {t('files_available_robot_details')} + + + { + navigate(`/devices/${robotName}`) + }} + > + {t('device_details')} + + + + ) : null} ) } diff --git a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/ProtocolDropTipModal.tsx b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/ProtocolDropTipModal.tsx index 7d96803c4a6..e1f1be57d22 100644 --- a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/ProtocolDropTipModal.tsx +++ b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/ProtocolDropTipModal.tsx @@ -16,14 +16,12 @@ import { } from '@opentrons/components' import { TextOnlyButton } from '/app/atoms/buttons' -import { useHomePipettes } from '/app/organisms/DropTipWizardFlows' +import { useHomePipettes } from '/app/local-resources/instruments' import type { PipetteData } from '@opentrons/api-client' import type { IconProps } from '@opentrons/components' -import type { - UseHomePipettesProps, - TipAttachmentStatusResult, -} from '/app/organisms/DropTipWizardFlows' +import type { UseHomePipettesProps } from '/app/local-resources/instruments' +import type { TipAttachmentStatusResult } from '/app/organisms/DropTipWizardFlows' type UseProtocolDropTipModalProps = Pick< UseHomePipettesProps, diff --git a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/__tests__/ProtocolDropTipModal.test.tsx b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/__tests__/ProtocolDropTipModal.test.tsx index 56a508b9666..0d95071a969 100644 --- a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/__tests__/ProtocolDropTipModal.test.tsx +++ b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/RunHeaderModalContainer/modals/__tests__/ProtocolDropTipModal.test.tsx @@ -4,7 +4,7 @@ import { renderHook, act, screen, fireEvent } from '@testing-library/react' import { renderWithProviders } from '/app/__testing-utils__' import { i18n } from '/app/i18n' -import { useHomePipettes } from '/app/organisms/DropTipWizardFlows' +import { useHomePipettes } from '/app/local-resources/instruments' import { useProtocolDropTipModal, ProtocolDropTipModal, @@ -12,7 
+12,7 @@ import { import type { Mock } from 'vitest' -vi.mock('/app/organisms/DropTipWizardFlows') +vi.mock('/app/local-resources/instruments') describe('useProtocolDropTipModal', () => { let props: Parameters[0] diff --git a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/index.tsx b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/index.tsx index b9641fcc96b..c6d33879be9 100644 --- a/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/index.tsx +++ b/app/src/organisms/Desktop/Devices/ProtocolRun/ProtocolRunHeader/index.tsx @@ -103,6 +103,11 @@ export function ProtocolRunHeader( isResetRunLoading={isResetRunLoadingRef.current} runErrors={runErrors} runHeaderModalContainerUtils={runHeaderModalContainerUtils} + hasDownloadableFiles={ + runRecord?.data != null && + 'outputFileIds' in runRecord.data && + runRecord.data.outputFileIds.length > 0 + } {...props} /> & { diff --git a/app/src/organisms/DropTipWizardFlows/hooks/index.ts b/app/src/organisms/DropTipWizardFlows/hooks/index.ts index 09acf2b2a5d..3f3f531a9d8 100644 --- a/app/src/organisms/DropTipWizardFlows/hooks/index.ts +++ b/app/src/organisms/DropTipWizardFlows/hooks/index.ts @@ -1,6 +1,5 @@ export * from './errors' export * from './useDropTipWithType' -export * from './useHomePipettes' export * from './useTipAttachmentStatus' export * from './useDropTipLocations' export { useDropTipRouting } from './useDropTipRouting' diff --git a/app/src/organisms/DropTipWizardFlows/index.ts b/app/src/organisms/DropTipWizardFlows/index.ts index 0030fa29a5a..1b53f36e5c8 100644 --- a/app/src/organisms/DropTipWizardFlows/index.ts +++ b/app/src/organisms/DropTipWizardFlows/index.ts @@ -1,10 +1,6 @@ export * from './DropTipWizardFlows' -export { useTipAttachmentStatus, useHomePipettes } from './hooks' +export { useTipAttachmentStatus } from './hooks' export * from './TipsAttachedModal' -export type { - UseHomePipettesProps, - TipAttachmentStatusResult, - PipetteWithTip, -} from './hooks' +export type { TipAttachmentStatusResult, PipetteWithTip } from './hooks' export type { FixitCommandTypeUtils } from './types' diff --git a/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx b/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx index 8acc69c8ab6..c44252e2da9 100644 --- a/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/RecoveryOptions/SelectRecoveryOption.tsx @@ -1,8 +1,10 @@ import { useState, useEffect } from 'react' import head from 'lodash/head' import { useTranslation } from 'react-i18next' +import { css } from 'styled-components' import { + RESPONSIVENESS, DIRECTION_COLUMN, Flex, SPACING, @@ -108,11 +110,7 @@ export function RecoveryOptions({ isOnDevice, }: RecoveryOptionsProps): JSX.Element { return ( - + {validRecoveryOptions.map((recoveryOption: RecoveryRoute) => { const optionName = getRecoveryOptionCopy(recoveryOption, errorKind) return ( @@ -133,6 +131,16 @@ export function RecoveryOptions({ ) } +const RECOVERY_OPTION_CONTAINER_STYLE = css` + flex-direction: ${DIRECTION_COLUMN}; + grid-gap: ${SPACING.spacing4}; + width: 100%; + + @media ${RESPONSIVENESS.touchscreenMediaQuerySpecs} { + grid-gap: ${SPACING.spacing8}; + } +` + // Pre-fetch tip attachment status. Users are not blocked from proceeding at this step. 
export function useCurrentTipStatus( determineTipStatus: () => Promise diff --git a/app/src/organisms/ErrorRecoveryFlows/__tests__/ErrorRecoveryFlows.test.tsx b/app/src/organisms/ErrorRecoveryFlows/__tests__/ErrorRecoveryFlows.test.tsx index a00335f6475..d73d402585d 100644 --- a/app/src/organisms/ErrorRecoveryFlows/__tests__/ErrorRecoveryFlows.test.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/__tests__/ErrorRecoveryFlows.test.tsx @@ -8,7 +8,7 @@ import { RUN_STATUS_RUNNING, RUN_STATUS_STOP_REQUESTED, } from '@opentrons/api-client' -import { getLabwareDefinitionsFromCommands } from '/app/local-resources/labware' +import { getLoadedLabwareDefinitionsByUri } from '@opentrons/shared-data' import { renderWithProviders } from '/app/__testing-utils__' import { i18n } from '/app/i18n' @@ -33,7 +33,13 @@ vi.mock('/app/redux/config') vi.mock('../RecoverySplash') vi.mock('/app/redux-resources/analytics') vi.mock('@opentrons/react-api-client') -vi.mock('/app/local-resources/labware') +vi.mock('@opentrons/shared-data', async () => { + const actual = await vi.importActual('@opentrons/shared-data') + return { + ...actual, + getLoadedLabwareDefinitionsByUri: vi.fn(), + } +}) vi.mock('react-redux', async () => { const actual = await vi.importActual('react-redux') return { @@ -45,7 +51,6 @@ vi.mock('react-redux', async () => { describe('useErrorRecoveryFlows', () => { beforeEach(() => { vi.mocked(useCurrentlyRecoveringFrom).mockReturnValue('mockCommand' as any) - vi.mocked(getLabwareDefinitionsFromCommands).mockReturnValue([]) }) it('should have initial state of isERActive as false', () => { @@ -143,7 +148,7 @@ describe('ErrorRecoveryFlows', () => { runStatus: RUN_STATUS_AWAITING_RECOVERY, failedCommandByRunRecord: mockFailedCommand, runId: 'MOCK_RUN_ID', - protocolAnalysis: {} as any, + protocolAnalysis: null, } vi.mocked(ErrorRecoveryWizard).mockReturnValue(
<div>MOCK WIZARD</div>
) vi.mocked(RecoverySplash).mockReturnValue(
<div>MOCK RUN PAUSED SPLASH</div>
) @@ -168,6 +173,7 @@ describe('ErrorRecoveryFlows', () => { intent: 'recovering', showTakeover: false, }) + vi.mocked(getLoadedLabwareDefinitionsByUri).mockReturnValue({}) }) it('renders the wizard when showERWizard is true', () => { diff --git a/app/src/organisms/ErrorRecoveryFlows/hooks/__tests__/useFailedLabwareUtils.test.ts b/app/src/organisms/ErrorRecoveryFlows/hooks/__tests__/useFailedLabwareUtils.test.tsx similarity index 75% rename from app/src/organisms/ErrorRecoveryFlows/hooks/__tests__/useFailedLabwareUtils.test.ts rename to app/src/organisms/ErrorRecoveryFlows/hooks/__tests__/useFailedLabwareUtils.test.tsx index ea5ca7c365a..b0716af5c8a 100644 --- a/app/src/organisms/ErrorRecoveryFlows/hooks/__tests__/useFailedLabwareUtils.test.ts +++ b/app/src/organisms/ErrorRecoveryFlows/hooks/__tests__/useFailedLabwareUtils.test.tsx @@ -1,6 +1,8 @@ import { describe, it, expect } from 'vitest' -import { renderHook } from '@testing-library/react' +import { screen, renderHook } from '@testing-library/react' +import { renderWithProviders } from '/app/__testing-utils__' +import { i18n } from '/app/i18n' import { getRelevantWellName, getRelevantFailedLabwareCmdFrom, @@ -8,6 +10,9 @@ import { } from '../useFailedLabwareUtils' import { DEFINED_ERROR_TYPES } from '../../constants' +import type { ComponentProps } from 'react' +import type { GetRelevantLwLocationsParams } from '../useFailedLabwareUtils' + describe('getRelevantWellName', () => { const failedPipetteInfo = { data: { @@ -159,12 +164,26 @@ describe('getRelevantFailedLabwareCmdFrom', () => { }) }) -// TODO(jh 10-15-24): This testing will can more useful once translation is refactored out of this function. +const TestWrapper = (props: GetRelevantLwLocationsParams) => { + const displayLocation = useRelevantFailedLwLocations(props) + return ( + <> +
<div>{`Current Loc: ${displayLocation.displayNameCurrentLoc}`}</div>
+
<div>{`New Loc: ${displayLocation.displayNameNewLoc}`}</div>
+ + ) +} + +const render = (props: ComponentProps) => { + return renderWithProviders(, { + i18nInstance: i18n, + })[0] +} + describe('useRelevantFailedLwLocations', () => { - const mockProtocolAnalysis = {} as any - const mockAllRunDefs = [] as any + const mockRunRecord = { data: { modules: [], labware: [] } } as any const mockFailedLabware = { - location: { slot: 'D1' }, + location: { slotName: 'D1' }, } as any it('should return current location for non-moveLabware commands', () => { @@ -172,41 +191,53 @@ describe('useRelevantFailedLwLocations', () => { commandType: 'aspirate', } as any + render({ + failedLabware: mockFailedLabware, + failedCommandByRunRecord: mockFailedCommand, + runRecord: mockRunRecord, + }) + + screen.getByText('Current Loc: Slot D1') + screen.getByText('New Loc: null') + const { result } = renderHook(() => useRelevantFailedLwLocations({ failedLabware: mockFailedLabware, failedCommandByRunRecord: mockFailedCommand, - protocolAnalysis: mockProtocolAnalysis, - allRunDefs: mockAllRunDefs, + runRecord: mockRunRecord, }) ) - expect(result.current).toEqual({ - currentLoc: '', - newLoc: null, - }) + expect(result.current.currentLoc).toStrictEqual({ slotName: 'D1' }) + expect(result.current.newLoc).toBeNull() }) - it('should return current and new location for moveLabware commands', () => { + it('should return current and new locations for moveLabware commands', () => { const mockFailedCommand = { commandType: 'moveLabware', params: { - newLocation: { slot: 'C2' }, + newLocation: { slotName: 'C2' }, }, } as any + render({ + failedLabware: mockFailedLabware, + failedCommandByRunRecord: mockFailedCommand, + runRecord: mockRunRecord, + }) + + screen.getByText('Current Loc: Slot D1') + screen.getByText('New Loc: Slot C2') + const { result } = renderHook(() => useRelevantFailedLwLocations({ failedLabware: mockFailedLabware, failedCommandByRunRecord: mockFailedCommand, - protocolAnalysis: mockProtocolAnalysis, - allRunDefs: mockAllRunDefs, + runRecord: mockRunRecord, }) ) - expect(result.current).toEqual({ - currentLoc: '', - newLoc: null, - }) + expect(result.current.currentLoc).toStrictEqual({ slotName: 'D1' }) + expect(result.current.newLoc).toStrictEqual({ slotName: 'C2' }) }) }) diff --git a/app/src/organisms/ErrorRecoveryFlows/hooks/useDeckMapUtils.ts b/app/src/organisms/ErrorRecoveryFlows/hooks/useDeckMapUtils.ts index 95dac5abdb7..06453d06d08 100644 --- a/app/src/organisms/ErrorRecoveryFlows/hooks/useDeckMapUtils.ts +++ b/app/src/organisms/ErrorRecoveryFlows/hooks/useDeckMapUtils.ts @@ -2,7 +2,6 @@ import { useMemo } from 'react' import { getDeckDefFromRobotType, - getLoadedLabwareDefinitionsByUri, getFixedTrashLabwareDefinition, getModuleDef2, getPositionFromSlotId, @@ -11,6 +10,11 @@ import { THERMOCYCLER_MODULE_V1, } from '@opentrons/shared-data' +import { + getRunLabwareRenderInfo, + getRunModuleRenderInfo, +} from '/app/organisms/InterventionModal/utils' + import type { Run } from '@opentrons/api-client' import type { DeckDefinition, @@ -22,14 +26,21 @@ import type { LoadedLabware, RobotType, LabwareDefinitionsByUri, + LoadedModule, } from '@opentrons/shared-data' import type { ErrorRecoveryFlowsProps } from '..' 
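// --- Reviewer sketch (not part of the patch): labwareDefinitionsByUri plumbing ---
// Per this diff, the URI-keyed definition map is built once from the analysis
// commands, then reused for the flattened allRunDefs list and for looking up the
// moved labware's definition. Minimal sketch; assumes `protocolAnalysis` and
// `failedLabware` are in scope as they are in ErrorRecoveryFlows/useDeckMapUtils.
import { getLoadedLabwareDefinitionsByUri } from '@opentrons/shared-data'
import type {
  LabwareDefinition2,
  LabwareDefinitionsByUri,
} from '@opentrons/shared-data'

const labwareDefinitionsByUri: LabwareDefinitionsByUri | null =
  protocolAnalysis != null
    ? getLoadedLabwareDefinitionsByUri(protocolAnalysis.commands)
    : null

const allRunDefs: LabwareDefinition2[] =
  labwareDefinitionsByUri != null ? Object.values(labwareDefinitionsByUri) : []

const movedLabwareDef: LabwareDefinition2 | null =
  labwareDefinitionsByUri != null && failedLabware != null
    ? labwareDefinitionsByUri[failedLabware.definitionUri] ?? null
    : null
// --- end sketch ---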
import type { UseFailedLabwareUtilsResult } from './useFailedLabwareUtils' +import type { + RunLabwareInfo, + RunModuleInfo, +} from '/app/organisms/InterventionModal/utils' +import type { ERUtilsProps } from './useERUtils' interface UseDeckMapUtilsProps { runId: ErrorRecoveryFlowsProps['runId'] protocolAnalysis: ErrorRecoveryFlowsProps['protocolAnalysis'] failedLabwareUtils: UseFailedLabwareUtilsResult + labwareDefinitionsByUri: ERUtilsProps['labwareDefinitionsByUri'] runRecord?: Run } @@ -37,6 +48,11 @@ export interface UseDeckMapUtilsResult { deckConfig: CutoutConfigProtocolSpec[] modulesOnDeck: RunCurrentModulesOnDeck[] labwareOnDeck: RunCurrentLabwareOnDeck[] + loadedLabware: LoadedLabware[] + loadedModules: LoadedModule[] + movedLabwareDef: LabwareDefinition2 | null + moduleRenderInfo: RunModuleInfo[] + labwareRenderInfo: RunLabwareInfo[] highlightLabwareEventuallyIn: string[] kind: 'intervention' robotType: RobotType @@ -47,19 +63,12 @@ export function useDeckMapUtils({ runRecord, runId, failedLabwareUtils, + labwareDefinitionsByUri, }: UseDeckMapUtilsProps): UseDeckMapUtilsResult { const robotType = protocolAnalysis?.robotType ?? OT2_ROBOT_TYPE const deckConfig = getSimplestDeckConfigForProtocol(protocolAnalysis) const deckDef = getDeckDefFromRobotType(robotType) - const labwareDefinitionsByUri = useMemo( - () => - protocolAnalysis != null - ? getLoadedLabwareDefinitionsByUri(protocolAnalysis?.commands) - : null, - [protocolAnalysis] - ) - const currentModulesInfo = useMemo( () => getRunCurrentModulesInfo({ @@ -93,6 +102,35 @@ export function useDeckMapUtils({ [runId, protocolAnalysis, runRecord, deckDef, failedLabwareUtils] ) + const movedLabwareDef = + labwareDefinitionsByUri != null && failedLabwareUtils.failedLabware != null + ? labwareDefinitionsByUri[failedLabwareUtils.failedLabware.definitionUri] + : null + + const moduleRenderInfo = useMemo( + () => + runRecord != null && labwareDefinitionsByUri != null + ? getRunModuleRenderInfo( + runRecord.data, + deckDef, + labwareDefinitionsByUri + ) + : [], + [deckDef, labwareDefinitionsByUri, runRecord] + ) + + const labwareRenderInfo = useMemo( + () => + runRecord != null && labwareDefinitionsByUri != null + ? getRunLabwareRenderInfo( + runRecord.data, + labwareDefinitionsByUri, + deckDef + ) + : [], + [deckDef, labwareDefinitionsByUri, runRecord] + ) + return { deckConfig, modulesOnDeck: runCurrentModules.map( @@ -112,6 +150,11 @@ export function useDeckMapUtils({ .filter(maybeSlot => maybeSlot != null) as string[], kind: 'intervention', robotType, + loadedModules: runRecord?.data.modules ?? [], + loadedLabware: runRecord?.data.labware ?? [], + movedLabwareDef, + moduleRenderInfo, + labwareRenderInfo, } } diff --git a/app/src/organisms/ErrorRecoveryFlows/hooks/useERUtils.ts b/app/src/organisms/ErrorRecoveryFlows/hooks/useERUtils.ts index 365bf01de36..57691a30e55 100644 --- a/app/src/organisms/ErrorRecoveryFlows/hooks/useERUtils.ts +++ b/app/src/organisms/ErrorRecoveryFlows/hooks/useERUtils.ts @@ -20,7 +20,11 @@ import { useShowDoorInfo } from './useShowDoorInfo' import { useCleanupRecoveryState } from './useCleanupRecoveryState' import { useFailedPipetteUtils } from './useFailedPipetteUtils' -import type { LabwareDefinition2, RobotType } from '@opentrons/shared-data' +import type { + LabwareDefinition2, + LabwareDefinitionsByUri, + RobotType, +} from '@opentrons/shared-data' import type { IRecoveryMap, RouteStep, RecoveryRoute } from '../types' import type { ErrorRecoveryFlowsProps } from '..' 
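// --- Reviewer sketch (not part of the patch): consuming failedLabwareLocations ---
// The reworked failedLabwareLocations carries both display copy and the raw
// LabwareLocation values. Minimal consumer sketch; assumes `failedLabwareUtils`
// comes from useFailedLabwareUtils as wired in this diff.
const {
  displayNameCurrentLoc, // e.g. 'Slot D1' (slot-only copy for the current location)
  displayNameNewLoc, // e.g. 'Slot C2' for failed moveLabware commands, otherwise null
  currentLoc, // raw location, e.g. { slotName: 'D1' }, or null
  newLoc, // raw target location for moveLabware, otherwise null
} = failedLabwareUtils.failedLabwareLocations

// Display copy feeds banners/labels (LeftColumnLabwareInfo uppercases it), while
// the raw locations drive the MoveLabwareOnDeck view added in TwoColLwInfoAndDeck.
const deckLabel =
  displayNameNewLoc != null ? displayNameNewLoc.toUpperCase() : undefined
// --- end sketch ---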
import type { UseRouteUpdateActionsResult } from './useRouteUpdateActions' @@ -48,6 +52,7 @@ export type ERUtilsProps = Omit & { failedCommand: ReturnType showTakeover: boolean allRunDefs: LabwareDefinition2[] + labwareDefinitionsByUri: LabwareDefinitionsByUri | null } export interface ERUtilsResults { @@ -82,6 +87,7 @@ export function useERUtils({ runStatus, showTakeover, allRunDefs, + labwareDefinitionsByUri, }: ERUtilsProps): ERUtilsResults { const { data: attachedInstruments } = useInstrumentsQuery() const { data: runRecord } = useNotifyRunQuery(runId) @@ -151,7 +157,6 @@ export function useERUtils({ failedPipetteInfo, runRecord, runCommands, - allRunDefs, }) const recoveryCommands = useRecoveryCommands({ @@ -169,6 +174,7 @@ export function useERUtils({ runRecord, protocolAnalysis, failedLabwareUtils, + labwareDefinitionsByUri, }) const recoveryActionMutationUtils = useRecoveryActionMutation( diff --git a/app/src/organisms/ErrorRecoveryFlows/hooks/useFailedLabwareUtils.ts b/app/src/organisms/ErrorRecoveryFlows/hooks/useFailedLabwareUtils.ts index c1925a752c3..d108bfb7d0a 100644 --- a/app/src/organisms/ErrorRecoveryFlows/hooks/useFailedLabwareUtils.ts +++ b/app/src/organisms/ErrorRecoveryFlows/hooks/useFailedLabwareUtils.ts @@ -28,6 +28,7 @@ import type { MoveLabwareRunTimeCommand, LabwareLocation, } from '@opentrons/shared-data' +import type { LabwareDisplayLocationSlotOnly } from '/app/local-resources/labware' import type { ErrorRecoveryFlowsProps } from '..' import type { ERUtilsProps } from './useERUtils' @@ -35,14 +36,15 @@ interface UseFailedLabwareUtilsProps { failedCommandByRunRecord: ERUtilsProps['failedCommandByRunRecord'] protocolAnalysis: ErrorRecoveryFlowsProps['protocolAnalysis'] failedPipetteInfo: PipetteData | null - allRunDefs: LabwareDefinition2[] runCommands?: CommandsData runRecord?: Run } interface RelevantFailedLabwareLocations { - currentLoc: string - newLoc: string | null + displayNameCurrentLoc: string + displayNameNewLoc: string | null + currentLoc: LabwareLocation | null + newLoc: LabwareLocation | null } export type UseFailedLabwareUtilsResult = UseTipSelectionUtilsResult & { @@ -54,6 +56,7 @@ export type UseFailedLabwareUtilsResult = UseTipSelectionUtilsResult & { relevantWellName: string | null /* The user-content nickname of the failed labware, if any */ failedLabwareNickname: string | null + /* Details relating to the labware location. 
*/ failedLabwareLocations: RelevantFailedLabwareLocations } @@ -69,7 +72,6 @@ export function useFailedLabwareUtils({ failedPipetteInfo, runCommands, runRecord, - allRunDefs, }: UseFailedLabwareUtilsProps): UseFailedLabwareUtilsResult { const recentRelevantFailedLabwareCmd = useMemo( () => @@ -105,8 +107,7 @@ export function useFailedLabwareUtils({ const failedLabwareLocations = useRelevantFailedLwLocations({ failedLabware, failedCommandByRunRecord, - protocolAnalysis, - allRunDefs, + runRecord, }) return { @@ -337,9 +338,9 @@ export function getRelevantWellName( } } -type GetRelevantLwLocationsParams = Pick< +export type GetRelevantLwLocationsParams = Pick< UseFailedLabwareUtilsProps, - 'protocolAnalysis' | 'failedCommandByRunRecord' | 'allRunDefs' + 'runRecord' | 'failedCommandByRunRecord' > & { failedLabware: UseFailedLabwareUtilsResult['failedLabware'] } @@ -347,41 +348,51 @@ type GetRelevantLwLocationsParams = Pick< export function useRelevantFailedLwLocations({ failedLabware, failedCommandByRunRecord, - protocolAnalysis, - allRunDefs, + runRecord, }: GetRelevantLwLocationsParams): RelevantFailedLabwareLocations { const { t } = useTranslation('protocol_command_text') - const currentLocation = getLabwareDisplayLocation({ - loadedLabwares: protocolAnalysis?.labware ?? [], - loadedModules: protocolAnalysis?.modules ?? [], - location: failedLabware?.location ?? null, - allRunDefs, + const BASE_DISPLAY_PARAMS: Omit< + LabwareDisplayLocationSlotOnly, + 'location' + > = { + loadedLabwares: runRecord?.data?.labware ?? [], + loadedModules: runRecord?.data?.modules ?? [], robotType: FLEX_ROBOT_TYPE, t, + detailLevel: 'slot-only', + isOnDevice: false, // Always return the "slot XYZ" copy, which is the desktop copy. + } + + const displayNameCurrentLoc = getLabwareDisplayLocation({ + ...BASE_DISPLAY_PARAMS, + location: failedLabware?.location ?? null, }) - const getNewLocation = (): LabwareLocation | null => { + const getNewLocation = (): Pick< + RelevantFailedLabwareLocations, + 'displayNameNewLoc' | 'newLoc' + > => { switch (failedCommandByRunRecord?.commandType) { case 'moveLabware': - return failedCommandByRunRecord.params.newLocation + return { + displayNameNewLoc: getLabwareDisplayLocation({ + ...BASE_DISPLAY_PARAMS, + location: failedCommandByRunRecord.params.newLocation, + }), + newLoc: failedCommandByRunRecord.params.newLocation, + } default: - return null + return { + displayNameNewLoc: null, + newLoc: null, + } } } - const newLocationByDisplayName = getLabwareDisplayLocation({ - loadedLabwares: protocolAnalysis?.labware ?? [], - loadedModules: protocolAnalysis?.modules ?? [], - location: getNewLocation(), - allRunDefs, - robotType: FLEX_ROBOT_TYPE, - t, - }) - return { - currentLoc: currentLocation, - newLoc: - newLocationByDisplayName.length === 0 ? null : newLocationByDisplayName, + displayNameCurrentLoc, + currentLoc: failedLabware?.location ?? 
null, + ...getNewLocation(), } } diff --git a/app/src/organisms/ErrorRecoveryFlows/index.tsx b/app/src/organisms/ErrorRecoveryFlows/index.tsx index 2bd26beb747..124c4fea65f 100644 --- a/app/src/organisms/ErrorRecoveryFlows/index.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/index.tsx @@ -13,11 +13,13 @@ import { RUN_STATUS_STOP_REQUESTED, RUN_STATUS_SUCCEEDED, } from '@opentrons/api-client' -import { OT2_ROBOT_TYPE } from '@opentrons/shared-data' +import { + getLoadedLabwareDefinitionsByUri, + OT2_ROBOT_TYPE, +} from '@opentrons/shared-data' import { useHost } from '@opentrons/react-api-client' import { getIsOnDevice } from '/app/redux/config' -import { getLabwareDefinitionsFromCommands } from '/app/local-resources/labware' import { ErrorRecoveryWizard, useERWizard } from './ErrorRecoveryWizard' import { RecoverySplash, useRecoverySplash } from './RecoverySplash' import { RecoveryTakeover } from './RecoveryTakeover' @@ -127,13 +129,19 @@ export function ErrorRecoveryFlows( const robotName = useHost()?.robotName ?? 'robot' const isValidRobotSideAnalysis = protocolAnalysis != null - const allRunDefs = useMemo( + + // TODO(jh, 10-22-24): EXEC-769. + const labwareDefinitionsByUri = useMemo( () => protocolAnalysis != null - ? getLabwareDefinitionsFromCommands(protocolAnalysis.commands) - : [], + ? getLoadedLabwareDefinitionsByUri(protocolAnalysis?.commands) + : null, [isValidRobotSideAnalysis] ) + const allRunDefs = + labwareDefinitionsByUri != null + ? Object.values(labwareDefinitionsByUri) + : [] const { showTakeover, @@ -151,6 +159,7 @@ export function ErrorRecoveryFlows( showTakeover, failedCommand: failedCommandBySource, allRunDefs, + labwareDefinitionsByUri, }) const renderWizard = diff --git a/app/src/organisms/ErrorRecoveryFlows/shared/LeftColumnLabwareInfo.tsx b/app/src/organisms/ErrorRecoveryFlows/shared/LeftColumnLabwareInfo.tsx index ad1e7b0bc4a..87cdac57255 100644 --- a/app/src/organisms/ErrorRecoveryFlows/shared/LeftColumnLabwareInfo.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/shared/LeftColumnLabwareInfo.tsx @@ -22,12 +22,14 @@ export function LeftColumnLabwareInfo({ failedLabwareNickname, failedLabwareLocations, } = failedLabwareUtils - const { newLoc, currentLoc } = failedLabwareLocations + const { displayNameNewLoc, displayNameCurrentLoc } = failedLabwareLocations const buildNewLocation = (): React.ComponentProps< typeof InterventionContent >['infoProps']['newLocationProps'] => - newLoc != null ? { deckLabel: newLoc.toUpperCase() } : undefined + displayNameNewLoc != null + ? { deckLabel: displayNameNewLoc.toUpperCase() } + : undefined return ( { + switch (selectedRecoveryOption) { + case MANUAL_MOVE_AND_SKIP.ROUTE: { + const { newLoc, currentLoc } = failedLabwareUtils.failedLabwareLocations + const { + movedLabwareDef, + moduleRenderInfo, + labwareRenderInfo, + ...restUtils + } = deckMapUtils + + const failedLwId = failedLabware?.id ?? '' + + const isValidDeck = + currentLoc != null && newLoc != null && movedLabwareDef != null + + return isValidDeck ? ( + + {moduleRenderInfo.map( + ({ + x, + y, + moduleId, + moduleDef, + nestedLabwareDef, + nestedLabwareId, + }) => ( + + {nestedLabwareDef != null && + nestedLabwareId !== failedLwId ? ( + + ) : null} + + ) + )} + {labwareRenderInfo + .filter(l => l.labwareId !== failedLwId) + .map(({ x, y, labwareDef, labwareId }) => ( + + {labwareDef != null && labwareId !== failedLwId ? 
( + + ) : null} + + ))} + + } + /> + ) : ( + + ) + } + default: + return + } + } + return ( @@ -109,9 +187,7 @@ export function TwoColLwInfoAndDeck( type={buildType()} bannerText={buildBannerText()} /> - - - + {buildDeckView()} diff --git a/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/LeftColumnLabwareInfo.test.tsx b/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/LeftColumnLabwareInfo.test.tsx index e2e6c268ef8..f38e1e06922 100644 --- a/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/LeftColumnLabwareInfo.test.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/LeftColumnLabwareInfo.test.tsx @@ -27,8 +27,8 @@ describe('LeftColumnLabwareInfo', () => { failedLabwareName: 'MOCK_LW_NAME', failedLabwareNickname: 'MOCK_LW_NICKNAME', failedLabwareLocations: { - currentLoc: 'slot A1', - newLoc: 'slot B2', + displayNameCurrentLoc: 'slot A1', + displayNameNewLoc: 'slot B2', }, } as any, type: 'location', @@ -76,7 +76,7 @@ describe('LeftColumnLabwareInfo', () => { }) it('does not include newLocationProps when newLoc is not provided', () => { - props.failedLabwareUtils.failedLabwareLocations.newLoc = null + props.failedLabwareUtils.failedLabwareLocations.displayNameNewLoc = null render(props) expect(vi.mocked(InterventionContent)).toHaveBeenCalledWith( @@ -91,9 +91,12 @@ describe('LeftColumnLabwareInfo', () => { it('converts location labels to uppercase', () => { props.failedLabwareUtils.failedLabwareLocations = { - currentLoc: 'slot A1', - newLoc: 'slot B2', + displayNameCurrentLoc: 'slot A1', + displayNameNewLoc: 'slot B2', + newLoc: {} as any, + currentLoc: {} as any, } + render(props) expect(vi.mocked(InterventionContent)).toHaveBeenCalledWith( diff --git a/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/SelectTips.test.tsx b/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/SelectTips.test.tsx index 9a8fc10f5d6..08db6269c4d 100644 --- a/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/SelectTips.test.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/SelectTips.test.tsx @@ -53,7 +53,10 @@ describe('SelectTips', () => { failedLabwareUtils: { selectedTipLocations: { A1: null }, areTipsSelected: true, - failedLabwareLocations: { newLoc: null, currentLoc: 'A1' }, + failedLabwareLocations: { + displayNameNewLoc: null, + displayNameCurrentLoc: 'A1', + }, } as any, } @@ -161,7 +164,10 @@ describe('SelectTips', () => { failedLabwareUtils: { selectedTipLocations: null, areTipsSelected: false, - failedLabwareLocations: { newLoc: null, currentLoc: '' }, + failedLabwareLocations: { + displayNameNewLoc: null, + displayNameCurrentLoc: '', + }, } as any, } diff --git a/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/TwoColLwInfoAndDeck.test.tsx b/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/TwoColLwInfoAndDeck.test.tsx index f2206c8f010..2f24fc0f3bb 100644 --- a/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/TwoColLwInfoAndDeck.test.tsx +++ b/app/src/organisms/ErrorRecoveryFlows/shared/__tests__/TwoColLwInfoAndDeck.test.tsx @@ -1,4 +1,7 @@ import { describe, it, vi, expect, beforeEach } from 'vitest' +import { screen } from '@testing-library/react' + +import { MoveLabwareOnDeck } from '@opentrons/components' import { renderWithProviders } from '/app/__testing-utils__' import { i18n } from '/app/i18n' @@ -11,6 +14,13 @@ import { getSlotNameAndLwLocFrom } from '../../hooks/useDeckMapUtils' import type * as React from 'react' import type { Mock } from 'vitest' +vi.mock('@opentrons/components', async () => { + const actual = await 
vi.importActual('@opentrons/components') + return { + ...actual, + MoveLabwareOnDeck: vi.fn(), + } +}) vi.mock('../LeftColumnLabwareInfo') vi.mock('../../hooks/useDeckMapUtils') @@ -39,11 +49,17 @@ describe('TwoColLwInfoAndDeck', () => { failedLabwareUtils: { relevantWellName: 'A1', failedLabware: { location: 'C1' }, + failedLabwareLocations: { newLoc: {}, currentLoc: {} }, + }, + deckMapUtils: { + movedLabwareDef: {}, + moduleRenderInfo: [], + labwareRenderInfo: [], }, - deckMapUtils: {}, currentRecoveryOptionUtils: { selectedRecoveryOption: RECOVERY_MAP.MANUAL_MOVE_AND_SKIP.ROUTE, }, + isOnDevice: true, } as any vi.mocked(LeftColumnLabwareInfo).mockReturnValue( @@ -131,4 +147,34 @@ describe('TwoColLwInfoAndDeck', () => { expect.anything() ) }) + + it(`renders a move labware on deck view if the selected recovery option is ${RECOVERY_MAP.MANUAL_MOVE_AND_SKIP.ROUTE} and props are valid`, () => { + vi.mocked(MoveLabwareOnDeck).mockReturnValue( +
<div>MOCK_MOVE_LW_ON_DECK</div>
+ ) + + props.currentRecoveryOptionUtils.selectedRecoveryOption = + RECOVERY_MAP.MANUAL_MOVE_AND_SKIP.ROUTE + render(props) + + screen.getByText('MOCK_MOVE_LW_ON_DECK') + }) + + it(`does not render a move labware on deck view if the selected recovery option is ${RECOVERY_MAP.MANUAL_MOVE_AND_SKIP.ROUTE} and props are invalid`, () => { + vi.mocked(MoveLabwareOnDeck).mockReturnValue( +
<div>MOCK_MOVE_LW_ON_DECK</div>
+ ) + + props.currentRecoveryOptionUtils.selectedRecoveryOption = + RECOVERY_MAP.MANUAL_MOVE_AND_SKIP.ROUTE + props.deckMapUtils = { + movedLabwareDef: null, + moduleRenderInfo: null, + labwareRenderInfo: null, + } as any + + render(props) + + expect(screen.queryByText('MOCK_MOVE_LW_ON_DECK')).not.toBeInTheDocument() + }) }) diff --git a/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts b/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts index 1aa7080a52a..fb9eea82c63 100644 --- a/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts +++ b/app/src/organisms/ErrorRecoveryFlows/utils/__tests__/getErrorKind.test.ts @@ -7,6 +7,11 @@ import type { RunCommandError, RunTimeCommand } from '@opentrons/shared-data' describe('getErrorKind', () => { it.each([ + { + commandType: 'prepareToAspirate', + errorType: DEFINED_ERROR_TYPES.OVERPRESSURE, + expectedError: ERROR_KINDS.OVERPRESSURE_PREPARE_TO_ASPIRATE, + }, { commandType: 'aspirate', errorType: DEFINED_ERROR_TYPES.OVERPRESSURE, diff --git a/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts b/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts index a537c3cf295..30fc4783473 100644 --- a/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts +++ b/app/src/organisms/ErrorRecoveryFlows/utils/getErrorKind.ts @@ -13,9 +13,12 @@ export function getErrorKind(failedCommand: RunTimeCommand | null): ErrorKind { const errorType = failedCommand?.error?.errorType if (errorIsDefined) { - // todo(mm, 2024-07-02): Also handle aspirateInPlace and dispenseInPlace. - // https://opentrons.atlassian.net/browse/EXEC-593 if ( + commandType === 'prepareToAspirate' && + errorType === DEFINED_ERROR_TYPES.OVERPRESSURE + ) { + return ERROR_KINDS.OVERPRESSURE_PREPARE_TO_ASPIRATE + } else if ( (commandType === 'aspirate' || commandType === 'aspirateInPlace') && errorType === DEFINED_ERROR_TYPES.OVERPRESSURE ) { diff --git a/app/src/redux/config/actions.ts b/app/src/redux/config/actions.ts index e0a6906b17f..915fce0a8f0 100644 --- a/app/src/redux/config/actions.ts +++ b/app/src/redux/config/actions.ts @@ -55,6 +55,7 @@ export const configInitialized = ( ): Types.ConfigInitializedAction => ({ type: Constants.INITIALIZED, payload: { config }, + meta: { shell: true }, }) // config value has been updated @@ -64,6 +65,7 @@ export const configValueUpdated = ( ): Types.ConfigValueUpdatedAction => ({ type: Constants.VALUE_UPDATED, payload: { path, value }, + meta: { shell: true }, }) export function toggleDevtools(): Types.ToggleConfigValueAction { diff --git a/app/src/redux/config/types.ts b/app/src/redux/config/types.ts index b408a2204e2..5d6b4b83ac9 100644 --- a/app/src/redux/config/types.ts +++ b/app/src/redux/config/types.ts @@ -16,11 +16,13 @@ export type ConfigState = Config | null export interface ConfigInitializedAction { type: typeof INITIALIZED payload: { config: Config } + meta: { shell: true } } export interface ConfigValueUpdatedAction { type: typeof VALUE_UPDATED payload: { path: string; value: any } + meta: { shell: true } } export interface UpdateConfigValueAction { diff --git a/app/src/redux/shell/update.ts b/app/src/redux/shell/update.ts index 7c9e3be1f58..aa5fb601840 100644 --- a/app/src/redux/shell/update.ts +++ b/app/src/redux/shell/update.ts @@ -3,11 +3,7 @@ import { createSelector } from 'reselect' import type { State } from '../types' -import type { - ShellUpdateAction, - ShellUpdateState, - RobotMassStorageDeviceEnumerated, -} from './types' +import type { ShellUpdateAction, 
ShellUpdateState } from './types' // command sent to app-shell via meta.shell === true export function checkShellUpdate(): ShellUpdateAction { @@ -37,16 +33,3 @@ export const getAvailableShellUpdate: ( ) => string | null = createSelector(getShellUpdateState, state => state.available && state.info ? state.info.version : null ) - -export function checkMassStorage( - state: State -): RobotMassStorageDeviceEnumerated { - return { - type: 'shell:ROBOT_MASS_STORAGE_DEVICE_ENUMERATED', - payload: { - rootPath: '', - filePaths: state.shell.filePaths, - }, - meta: { shell: true }, - } -} diff --git a/hardware-testing/hardware_testing/examples/capacitive_probe_ot3.py b/hardware-testing/hardware_testing/examples/capacitive_probe_ot3.py index c3bdfd588e7..e0306a25779 100644 --- a/hardware-testing/hardware_testing/examples/capacitive_probe_ot3.py +++ b/hardware-testing/hardware_testing/examples/capacitive_probe_ot3.py @@ -2,7 +2,7 @@ import argparse import asyncio -from opentrons.config.types import CapacitivePassSettings, OutputOptions +from opentrons.config.types import CapacitivePassSettings from opentrons.hardware_control.ot3api import OT3API from hardware_testing.opentrons_api import types @@ -44,14 +44,12 @@ max_overrun_distance_mm=3, speed_mm_per_s=1, sensor_threshold_pf=STABLE_CAP_PF, - output_option=OutputOptions.sync_only, ) PROBE_SETTINGS_XY_AXIS = CapacitivePassSettings( prep_distance_mm=CUTOUT_SIZE / 2, max_overrun_distance_mm=3, speed_mm_per_s=1, sensor_threshold_pf=STABLE_CAP_PF, - output_option=OutputOptions.sync_only, ) diff --git a/hardware-testing/hardware_testing/examples/capacitive_probe_ot3_tunable.py b/hardware-testing/hardware_testing/examples/capacitive_probe_ot3_tunable.py index 5b14e88bc12..0fe1f693246 100644 --- a/hardware-testing/hardware_testing/examples/capacitive_probe_ot3_tunable.py +++ b/hardware-testing/hardware_testing/examples/capacitive_probe_ot3_tunable.py @@ -2,9 +2,8 @@ import argparse import asyncio -from opentrons.config.types import CapacitivePassSettings, OutputOptions +from opentrons.config.types import CapacitivePassSettings from opentrons.hardware_control.ot3api import OT3API -from opentrons.hardware_control.types import InstrumentProbeType from hardware_testing.opentrons_api import types from hardware_testing.opentrons_api import helpers_ot3 @@ -46,15 +45,12 @@ max_overrun_distance_mm=3, speed_mm_per_s=1, sensor_threshold_pf=CAP_REL_THRESHOLD_PF, - output_option=OutputOptions.sync_only, ) PROBE_SETTINGS_Z_AXIS_OUTPUT = CapacitivePassSettings( prep_distance_mm=10, max_overrun_distance_mm=3, speed_mm_per_s=1, sensor_threshold_pf=CAP_REL_THRESHOLD_PF, - output_option=OutputOptions.sync_buffer_to_csv, - data_files={InstrumentProbeType.PRIMARY: "/data/capacitive_sensor_data.csv"}, ) diff --git a/hardware-testing/hardware_testing/gravimetric/config.py b/hardware-testing/hardware_testing/gravimetric/config.py index b783908d5e6..304087748d1 100644 --- a/hardware-testing/hardware_testing/gravimetric/config.py +++ b/hardware-testing/hardware_testing/gravimetric/config.py @@ -3,9 +3,8 @@ from typing import List, Dict, Tuple from typing_extensions import Final from enum import Enum -from opentrons.config.types import LiquidProbeSettings, OutputOptions +from opentrons.config.types import LiquidProbeSettings from opentrons.protocol_api.labware import Well -from opentrons.hardware_control.types import InstrumentProbeType class ConfigType(Enum): @@ -170,13 +169,11 @@ def _get_liquid_probe_settings( plunger_speed=lqid_cfg["plunger_speed"], plunger_impulse_time=0.2, 
sensor_threshold_pascals=lqid_cfg["sensor_threshold_pascals"], - output_option=OutputOptions.sync_only, aspirate_while_sensing=False, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files={InstrumentProbeType.PRIMARY: "/data/testing_data/pressure.csv"}, ) diff --git a/hardware-testing/hardware_testing/liquid_sense/execute.py b/hardware-testing/hardware_testing/liquid_sense/execute.py index 01cb0d27375..001abdaa82f 100644 --- a/hardware-testing/hardware_testing/liquid_sense/execute.py +++ b/hardware-testing/hardware_testing/liquid_sense/execute.py @@ -4,7 +4,7 @@ from enum import Enum from typing import Dict, Any, List, Tuple, Optional from .report import store_tip_results, store_trial, store_baseline_trial -from opentrons.config.types import LiquidProbeSettings, OutputOptions +from opentrons.config.types import LiquidProbeSettings from .__main__ import RunArgs from hardware_testing.gravimetric.workarounds import get_sync_hw_api from hardware_testing.gravimetric.helpers import ( @@ -445,13 +445,11 @@ def _run_trial( plunger_speed=plunger_speed, plunger_impulse_time=0.2, sensor_threshold_pascals=lqid_cfg["sensor_threshold_pascals"], - output_option=OutputOptions.sync_buffer_to_csv, aspirate_while_sensing=run_args.aspirate, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files=data_files, ) hw_mount = OT3Mount.LEFT if run_args.pipette.mount == "left" else OT3Mount.RIGHT diff --git a/hardware-testing/hardware_testing/production_qc/pipette_assembly_qc_ot3/__main__.py b/hardware-testing/hardware_testing/production_qc/pipette_assembly_qc_ot3/__main__.py index 139074ed0a1..90637e81540 100644 --- a/hardware-testing/hardware_testing/production_qc/pipette_assembly_qc_ot3/__main__.py +++ b/hardware-testing/hardware_testing/production_qc/pipette_assembly_qc_ot3/__main__.py @@ -18,7 +18,7 @@ from opentrons_hardware.firmware_bindings.messages.messages import MessageDefinition from opentrons_hardware.firmware_bindings.constants import SensorType, SensorId -from opentrons.config.types import LiquidProbeSettings, OutputOptions +from opentrons.config.types import LiquidProbeSettings from opentrons.hardware_control.types import ( TipStateType, FailedTipStateCheck, @@ -1378,13 +1378,11 @@ async def _test_liquid_probe( plunger_speed=probe_cfg.plunger_speed, plunger_impulse_time=0.2, sensor_threshold_pascals=probe_cfg.sensor_threshold_pascals, - output_option=OutputOptions.can_bus_only, # FIXME: remove aspirate_while_sensing=False, z_overlap_between_passes_mm=0.1, plunger_reset_offset=2.0, samples_for_baselining=20, sample_time_sec=0.004, - data_files=None, ) end_z = await api.liquid_probe( mount, max_z_distance_machine_coords, probe_settings, probe=probe diff --git a/hardware-testing/hardware_testing/production_qc/robot_assembly_qc_ot3/test_instruments.py b/hardware-testing/hardware_testing/production_qc/robot_assembly_qc_ot3/test_instruments.py index 994dbf4ea99..45c1a7cc9c3 100644 --- a/hardware-testing/hardware_testing/production_qc/robot_assembly_qc_ot3/test_instruments.py +++ b/hardware-testing/hardware_testing/production_qc/robot_assembly_qc_ot3/test_instruments.py @@ -1,7 +1,7 @@ """Test Instruments.""" from typing import List, Tuple, Optional, Union -from opentrons.config.types import CapacitivePassSettings, OutputOptions +from opentrons.config.types import CapacitivePassSettings from opentrons.hardware_control.ot3api import OT3API from hardware_testing.data.csv_report 
import ( @@ -30,7 +30,6 @@ max_overrun_distance_mm=0, speed_mm_per_s=Z_PROBE_DISTANCE_MM / Z_PROBE_TIME_SECONDS, sensor_threshold_pf=1.0, - output_option=OutputOptions.can_bus_only, ) RELATIVE_MOVE_FROM_HOME_DELTA = Point(x=-500, y=-300) diff --git a/hardware-testing/hardware_testing/scripts/ABRAsairScript.py b/hardware-testing/hardware_testing/scripts/ABRAsairScript.py index 2324e330dc7..8eea871b9a3 100644 --- a/hardware-testing/hardware_testing/scripts/ABRAsairScript.py +++ b/hardware-testing/hardware_testing/scripts/ABRAsairScript.py @@ -3,7 +3,7 @@ import paramiko as pmk import time import multiprocessing -from typing import Optional, List +from typing import Optional, List, Any def execute(client: pmk.SSHClient, command: str, args: list) -> Optional[int]: @@ -15,19 +15,8 @@ def execute(client: pmk.SSHClient, command: str, args: list) -> Optional[int]: stdin, stdout, stderr = client.exec_command(command, get_pty=True) stdout_lines: List[str] = [] stderr_lines: List[str] = [] - time.sleep(15) + time.sleep(25) - # check stdout, stderr - - # Check the exit status of the command. - # while not stdout.channel.exit_status_ready(): - # if stdout.channel.recv_ready(): - # stdout_lines = stdout.readlines() - # print(f"{args[0]} output:", "".join(stdout_lines)) - # if stderr.channel.recv_ready(): - # stderr_lines = stderr.readlines() - # print(f"{args[0]} ERROR:", "".join(stdout_lines)) - # return 1 if stderr.channel.recv_ready: stderr_lines = stderr.readlines() if stderr_lines != []: @@ -58,24 +47,9 @@ def connect_ssh(ip: str) -> pmk.SSHClient: return client -# Load Robot IPs -file_name = sys.argv[1] -robot_ips = [] -robot_names = [] - -with open(file_name) as file: - for line in file.readlines(): - info = line.split(",") - if "Y" in info[2]: - robot_ips.append(info[0]) - robot_names.append(info[1]) - -cmd = "nohup python3 -m hardware_testing.scripts.abr_asair_sensor {name} {duration} {frequency}" -cd = "cd /opt/opentrons-robot-server && " -print("Executing Script on All Robots:") - - -def run_command_on_ip(index: int) -> None: +def run_command_on_ip( + index: int, robot_ips: List[str], robot_names: List[str], cd: str, cmd: str +) -> None: """Execute ssh command and start abr_asair script on the specified robot.""" curr_ip = robot_ips[index] try: @@ -87,15 +61,35 @@ def run_command_on_ip(index: int) -> None: print(f"Error running command on {curr_ip}: {e}") -# Launch the processes for each robot. -processes = [] -for index in range(len(robot_ips)): - process = multiprocessing.Process(target=run_command_on_ip, args=(index,)) - processes.append(process) +def run(file_name: str) -> List[Any]: + """Run asair script module.""" + # Load Robot IPs + cmd = "nohup python3 -m hardware_testing.scripts.abr_asair_sensor {name} {duration} {frequency}" + cd = "cd /opt/opentrons-robot-server && " + robot_ips = [] + robot_names = [] + with open(file_name) as file: + for line in file.readlines(): + info = line.split(",") + if "Y" in info[2]: + robot_ips.append(info[0]) + robot_names.append(info[1]) + print("Executing Script on All Robots:") + # Launch the processes for each robot. + processes = [] + for index in range(len(robot_ips)): + process = multiprocessing.Process( + target=run_command_on_ip, args=(index, robot_ips, robot_names, cd, cmd) + ) + processes.append(process) + return processes if __name__ == "__main__": # Wait for all processes to finish. 
+ file_name = sys.argv[1] + processes = run(file_name) + for process in processes: process.start() time.sleep(20) diff --git a/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py b/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py index 1e8fca0358c..ba41f9399f1 100644 --- a/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py +++ b/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py @@ -80,7 +80,7 @@ def __init__(self, robot: str, duration: int, frequency: int) -> None: break # write to google sheet try: - if google_sheet.creditals.access_token_expired: + if google_sheet.credentials.access_token_expired: google_sheet.gc.login() google_sheet.write_header(header) google_sheet.update_row_index() diff --git a/hardware-testing/hardware_testing/scripts/gripper_ot3.py b/hardware-testing/hardware_testing/scripts/gripper_ot3.py index 511ea11809d..cd131b8f13a 100644 --- a/hardware-testing/hardware_testing/scripts/gripper_ot3.py +++ b/hardware-testing/hardware_testing/scripts/gripper_ot3.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from typing import Optional, List, Any, Dict -from opentrons.config.defaults_ot3 import CapacitivePassSettings, OutputOptions +from opentrons.config.defaults_ot3 import CapacitivePassSettings from opentrons.hardware_control.ot3api import OT3API from hardware_testing.opentrons_api import types @@ -73,7 +73,6 @@ max_overrun_distance_mm=1, speed_mm_per_s=1, sensor_threshold_pf=0.5, - output_option=OutputOptions.sync_only, ) LABWARE_PROBE_CORNER_TOP_LEFT_XY = { "plate": Point(x=5, y=-5), diff --git a/hardware/opentrons_hardware/firmware_bindings/messages/messages.py b/hardware/opentrons_hardware/firmware_bindings/messages/messages.py index 0249ddec69e..35683bc1afb 100644 --- a/hardware/opentrons_hardware/firmware_bindings/messages/messages.py +++ b/hardware/opentrons_hardware/firmware_bindings/messages/messages.py @@ -74,6 +74,7 @@ defs.BaselineSensorResponse, defs.SetSensorThresholdRequest, defs.ReadFromSensorResponse, + defs.BatchReadFromSensorResponse, defs.SensorThresholdResponse, defs.SensorDiagnosticRequest, defs.SensorDiagnosticResponse, diff --git a/hardware/opentrons_hardware/hardware_control/tool_sensors.py b/hardware/opentrons_hardware/hardware_control/tool_sensors.py index 173a8c2738b..95076f01c1c 100644 --- a/hardware/opentrons_hardware/hardware_control/tool_sensors.py +++ b/hardware/opentrons_hardware/hardware_control/tool_sensors.py @@ -1,5 +1,6 @@ """Functions for commanding motion limited by tool sensors.""" import asyncio +from contextlib import AsyncExitStack from functools import partial from typing import ( Union, @@ -11,6 +12,7 @@ AsyncContextManager, Optional, AsyncIterator, + Mapping, ) from logging import getLogger from numpy import float64 @@ -41,6 +43,7 @@ from opentrons_hardware.sensors.sensor_driver import SensorDriver, LogListener from opentrons_hardware.sensors.types import ( sensor_fixed_point_conversion, + SensorDataType, ) from opentrons_hardware.sensors.sensor_types import ( SensorInformation, @@ -61,28 +64,13 @@ ) LOG = getLogger(__name__) + PipetteProbeTarget = Literal[NodeId.pipette_left, NodeId.pipette_right] InstrumentProbeTarget = Union[PipetteProbeTarget, Literal[NodeId.gripper]] ProbeSensorDict = Union[ Dict[SensorId, PressureSensor], Dict[SensorId, CapacitiveSensor] ] -pressure_output_file_heading = [ - "time(s)", - "Pressure(pascals)", - "z_velocity(mm/s)", - "plunger_velocity(mm/s)", - "threshold(pascals)", -] - -capacitive_output_file_heading = [ - "time(s)", - 
"Capacitance(farads)", - "z_velocity(mm/s)", - "plunger_velocity(mm/s)", - "threshold(farads)", -] - def _fix_pass_step_for_buffer( move_group: MoveGroupStep, @@ -167,124 +155,6 @@ def _build_pass_step( return move_group -async def run_sync_buffer_to_csv( - messenger: CanMessenger, - mount_speed: float, - plunger_speed: float, - threshold: float, - head_node: NodeId, - move_group: MoveGroupRunner, - log_files: Dict[SensorId, str], - tool: InstrumentProbeTarget, - sensor_type: SensorType, - output_file_heading: list[str], - raise_z: Optional[MoveGroupRunner] = None, -) -> Dict[NodeId, MotorPositionStatus]: - """Runs the sensor pass move group and creates a csv file with the results.""" - sensor_metadata = [0, 0, mount_speed, plunger_speed, threshold] - positions = await move_group.run(can_messenger=messenger) - # wait a little to see the dropoff curve - await asyncio.sleep(0.15) - for sensor_id in log_files.keys(): - await messenger.ensure_send( - node_id=tool, - message=BindSensorOutputRequest( - payload=BindSensorOutputRequestPayload( - sensor=SensorTypeField(sensor_type), - sensor_id=SensorIdField(sensor_id), - binding=SensorOutputBindingField(SensorOutputBinding.none), - ) - ), - expected_nodes=[tool], - ) - if raise_z is not None: - # if probing is finished, move the head node back up before requesting the data buffer - if positions[head_node].move_ack == MoveCompleteAck.stopped_by_condition: - await raise_z.run(can_messenger=messenger) - for sensor_id in log_files.keys(): - sensor_capturer = LogListener( - mount=head_node, - data_file=log_files[sensor_id], - file_heading=output_file_heading, - sensor_metadata=sensor_metadata, - ) - async with sensor_capturer: - messenger.add_listener(sensor_capturer, None) - request = SendAccumulatedSensorDataRequest( - payload=SendAccumulatedSensorDataPayload( - sensor_id=SensorIdField(sensor_id), - sensor_type=SensorTypeField(sensor_type), - ) - ) - await messenger.send( - node_id=tool, - message=request, - ) - await sensor_capturer.wait_for_complete( - message_index=request.payload.message_index.value - ) - messenger.remove_listener(sensor_capturer) - return positions - - -async def run_stream_output_to_csv( - messenger: CanMessenger, - sensors: ProbeSensorDict, - mount_speed: float, - plunger_speed: float, - threshold: float, - head_node: NodeId, - move_group: MoveGroupRunner, - log_files: Dict[SensorId, str], - output_file_heading: list[str], -) -> Dict[NodeId, MotorPositionStatus]: - """Runs the sensor pass move group and creates a csv file with the results.""" - sensor_metadata = [0, 0, mount_speed, plunger_speed, threshold] - sensor_capturer = LogListener( - mount=head_node, - data_file=log_files[ - next(iter(log_files)) - ], # hardcode to the first file, need to think more on this - file_heading=output_file_heading, - sensor_metadata=sensor_metadata, - ) - binding = [SensorOutputBinding.sync, SensorOutputBinding.report] - binding_field = SensorOutputBindingField.from_flags(binding) - for sensor_id in sensors.keys(): - sensor_info = sensors[sensor_id].sensor - await messenger.ensure_send( - node_id=sensor_info.node_id, - message=BindSensorOutputRequest( - payload=BindSensorOutputRequestPayload( - sensor=SensorTypeField(sensor_info.sensor_type), - sensor_id=SensorIdField(sensor_info.sensor_id), - binding=binding_field, - ) - ), - expected_nodes=[sensor_info.node_id], - ) - - messenger.add_listener(sensor_capturer, None) - async with sensor_capturer: - positions = await move_group.run(can_messenger=messenger) - 
messenger.remove_listener(sensor_capturer) - - for sensor_id in sensors.keys(): - sensor_info = sensors[sensor_id].sensor - await messenger.ensure_send( - node_id=sensor_info.node_id, - message=BindSensorOutputRequest( - payload=BindSensorOutputRequestPayload( - sensor=SensorTypeField(sensor_info.sensor_type), - sensor_id=SensorIdField(sensor_info.sensor_id), - binding=SensorOutputBindingField(SensorOutputBinding.none), - ) - ), - expected_nodes=[sensor_info.node_id], - ) - return positions - - async def _setup_pressure_sensors( messenger: CanMessenger, sensor_id: SensorId, @@ -351,42 +221,42 @@ async def _setup_capacitive_sensors( return result -async def _run_with_binding( +async def finalize_logs( messenger: CanMessenger, - sensors: ProbeSensorDict, - sensor_runner: MoveGroupRunner, - binding: List[SensorOutputBinding], -) -> Dict[NodeId, MotorPositionStatus]: - binding_field = SensorOutputBindingField.from_flags(binding) - for sensor_id in sensors.keys(): - sensor_info = sensors[sensor_id].sensor + tool: NodeId, + listeners: Dict[SensorId, LogListener], + sensors: Mapping[SensorId, Union[CapacitiveSensor, PressureSensor]], +) -> None: + """Signal the sensors to finish sending their data and wait for it to flush out.""" + for s_id in sensors.keys(): + # Tell the sensor to stop recording await messenger.ensure_send( - node_id=sensor_info.node_id, - message=BindSensorOutputRequest( - payload=BindSensorOutputRequestPayload( - sensor=SensorTypeField(sensor_info.sensor_type), - sensor_id=SensorIdField(sensor_info.sensor_id), - binding=binding_field, - ) - ), - expected_nodes=[sensor_info.node_id], - ) - - result = await sensor_runner.run(can_messenger=messenger) - for sensor_id in sensors.keys(): - sensor_info = sensors[sensor_id].sensor - await messenger.ensure_send( - node_id=sensor_info.node_id, + node_id=tool, message=BindSensorOutputRequest( payload=BindSensorOutputRequestPayload( - sensor=SensorTypeField(sensor_info.sensor_type), - sensor_id=SensorIdField(sensor_info.sensor_id), + sensor=SensorTypeField(sensors[s_id].sensor.sensor_type), + sensor_id=SensorIdField(s_id), binding=SensorOutputBindingField(SensorOutputBinding.none), ) ), - expected_nodes=[sensor_info.node_id], + expected_nodes=[tool], ) - return result + request = SendAccumulatedSensorDataRequest( + payload=SendAccumulatedSensorDataPayload( + sensor_id=SensorIdField(s_id), + sensor_type=SensorTypeField(sensors[s_id].sensor.sensor_type), + ) + ) + # set the message index of the Ack that signals this sensor is finished sending data + listeners[s_id].set_stop_ack(request.payload.message_index.value) + # tell the sensor to clear it's queue + await messenger.send( + node_id=tool, + message=request, + ) + # wait for the data to finish sending + for listener in listeners.values(): + await listener.wait_for_complete() async def liquid_probe( @@ -399,15 +269,13 @@ async def liquid_probe( threshold_pascals: float, plunger_impulse_time: float, num_baseline_reads: int, - csv_output: bool = False, - sync_buffer_output: bool = False, - can_bus_only_output: bool = False, - data_files: Optional[Dict[SensorId, str]] = None, sensor_id: SensorId = SensorId.S0, force_both_sensors: bool = False, + response_queue: Optional[ + asyncio.Queue[Dict[SensorId, List[SensorDataType]]] + ] = None, ) -> Dict[NodeId, MotorPositionStatus]: """Move the mount and pipette simultaneously while reading from the pressure sensor.""" - log_files: Dict[SensorId, str] = {} if not data_files else data_files sensor_driver = SensorDriver() threshold_fixed_point = 
threshold_pascals * sensor_fixed_point_conversion sensor_binding = None @@ -420,7 +288,7 @@ async def liquid_probe( + SensorOutputBinding.report + SensorOutputBinding.multi_sensor_sync ) - pressure_sensors = await _setup_pressure_sensors( + pressure_sensors: Dict[SensorId, PressureSensor] = await _setup_pressure_sensors( messenger, sensor_id, tool, @@ -440,6 +308,7 @@ async def liquid_probe( duration=float64(plunger_impulse_time), present_nodes=[tool], ) + sensor_group = _build_pass_step( movers=[head_node, tool], distance={head_node: max_z_distance, tool: p_pass_distance}, @@ -449,64 +318,56 @@ async def liquid_probe( stop_condition=MoveStopCondition.sync_line, binding_flags=sensor_binding, ) - if sync_buffer_output: - sensor_group = _fix_pass_step_for_buffer( - sensor_group, - movers=[head_node, tool], - distance={head_node: max_z_distance, tool: p_pass_distance}, - speed={head_node: mount_speed, tool: plunger_speed}, - sensor_type=SensorType.pressure, - sensor_id=sensor_id, - stop_condition=MoveStopCondition.sync_line, - binding_flags=sensor_binding, - ) + sensor_group = _fix_pass_step_for_buffer( + sensor_group, + movers=[head_node, tool], + distance={head_node: max_z_distance, tool: p_pass_distance}, + speed={head_node: mount_speed, tool: plunger_speed}, + sensor_type=SensorType.pressure, + sensor_id=sensor_id, + stop_condition=MoveStopCondition.sync_line, + binding_flags=sensor_binding, + ) sensor_runner = MoveGroupRunner(move_groups=[[lower_plunger], [sensor_group]]) - if csv_output: - return await run_stream_output_to_csv( - messenger, - pressure_sensors, - mount_speed, - plunger_speed, - threshold_pascals, - head_node, - sensor_runner, - log_files, - pressure_output_file_heading, - ) - elif sync_buffer_output: - raise_z = create_step( - distance={head_node: float64(max_z_distance)}, - velocity={head_node: float64(-1 * mount_speed)}, - acceleration={}, - duration=float64(max_z_distance / mount_speed), - present_nodes=[head_node], - ) - raise_z_runner = MoveGroupRunner(move_groups=[[raise_z]]) - - return await run_sync_buffer_to_csv( - messenger=messenger, - mount_speed=mount_speed, - plunger_speed=plunger_speed, - threshold=threshold_pascals, - head_node=head_node, - move_group=sensor_runner, - raise_z=raise_z_runner, - log_files=log_files, - tool=tool, - sensor_type=SensorType.pressure, - output_file_heading=pressure_output_file_heading, - ) - elif can_bus_only_output: - binding = [SensorOutputBinding.sync, SensorOutputBinding.report] - return await _run_with_binding( - messenger, pressure_sensors, sensor_runner, binding - ) - else: # none - binding = [SensorOutputBinding.sync] - return await _run_with_binding( - messenger, pressure_sensors, sensor_runner, binding - ) + + raise_z = create_step( + distance={head_node: float64(max_z_distance)}, + velocity={head_node: float64(-1 * mount_speed)}, + acceleration={}, + duration=float64(max_z_distance / mount_speed), + present_nodes=[head_node], + ) + + raise_z_runner = MoveGroupRunner(move_groups=[[raise_z]]) + listeners = { + s_id: LogListener(messenger, pressure_sensors[s_id]) + for s_id in pressure_sensors.keys() + } + + LOG.info( + f"Starting LLD pass: {head_node} {sensor_id} max p distance {max_p_distance} max z distance {max_z_distance}" + ) + async with AsyncExitStack() as binding_stack: + for listener in listeners.values(): + await binding_stack.enter_async_context(listener) + positions = await sensor_runner.run(can_messenger=messenger) + if positions[head_node].move_ack == MoveCompleteAck.stopped_by_condition: + LOG.info( + 
f"Liquid found {head_node} motor_postion {positions[head_node].motor_position} encoder position {positions[head_node].encoder_position}" + ) + await raise_z_runner.run(can_messenger=messenger) + await finalize_logs(messenger, tool, listeners, pressure_sensors) + + # give response data to any consumer that wants it + if response_queue: + for s_id in listeners.keys(): + data = listeners[s_id].get_data() + if data: + for d in data: + response_queue.put_nowait({s_id: data}) + + return positions async def check_overpressure( @@ -536,10 +397,9 @@ async def capacitive_probe( mount_speed: float, sensor_id: SensorId = SensorId.S0, relative_threshold_pf: float = 1.0, - csv_output: bool = False, - sync_buffer_output: bool = False, - can_bus_only_output: bool = False, - data_files: Optional[Dict[SensorId, str]] = None, + response_queue: Optional[ + asyncio.Queue[dict[SensorId, list[SensorDataType]]] + ] = None, ) -> MotorPositionStatus: """Move the specified tool down until its capacitive sensor triggers. @@ -549,7 +409,6 @@ async def capacitive_probe( The direction is sgn(distance)*sgn(speed), so you can set the direction either by negating speed or negating distance. """ - log_files: Dict[SensorId, str] = {} if not data_files else data_files sensor_driver = SensorDriver() pipette_present = tool in [NodeId.pipette_left, NodeId.pipette_right] @@ -577,53 +436,36 @@ async def capacitive_probe( sensor_id=sensor_id, stop_condition=MoveStopCondition.sync_line, ) - if sync_buffer_output: - sensor_group = _fix_pass_step_for_buffer( - sensor_group, - movers=movers, - distance=probe_distance, - speed=probe_speed, - sensor_type=SensorType.capacitive, - sensor_id=sensor_id, - stop_condition=MoveStopCondition.sync_line, - ) + + sensor_group = _fix_pass_step_for_buffer( + sensor_group, + movers=movers, + distance=probe_distance, + speed=probe_speed, + sensor_type=SensorType.capacitive, + sensor_id=sensor_id, + stop_condition=MoveStopCondition.sync_line, + ) runner = MoveGroupRunner(move_groups=[[sensor_group]]) - if csv_output: - positions = await run_stream_output_to_csv( - messenger, - capacitive_sensors, - mount_speed, - 0.0, - relative_threshold_pf, - mover, - runner, - log_files, - capacitive_output_file_heading, - ) - elif sync_buffer_output: - positions = await run_sync_buffer_to_csv( - messenger, - mount_speed, - 0.0, - relative_threshold_pf, - mover, - runner, - log_files, - tool=tool, - sensor_type=SensorType.capacitive, - output_file_heading=capacitive_output_file_heading, - ) - elif can_bus_only_output: - binding = [SensorOutputBinding.sync, SensorOutputBinding.report] - positions = await _run_with_binding( - messenger, capacitive_sensors, runner, binding - ) - else: - binding = [SensorOutputBinding.sync] - positions = await _run_with_binding( - messenger, capacitive_sensors, runner, binding - ) + + listeners = { + s_id: LogListener(messenger, capacitive_sensors[s_id]) + for s_id in capacitive_sensors.keys() + } + async with AsyncExitStack() as binding_stack: + for listener in listeners.values(): + await binding_stack.enter_async_context(listener) + positions = await runner.run(can_messenger=messenger) + await finalize_logs(messenger, tool, listeners, capacitive_sensors) + + # give response data to any consumer that wants it + if response_queue: + for s_id in listeners.keys(): + data = listeners[s_id].get_data() + if data: + for d in data: + response_queue.put_nowait({s_id: data}) return positions[mover] diff --git a/hardware/opentrons_hardware/sensors/__init__.py 
b/hardware/opentrons_hardware/sensors/__init__.py index adc4f0c52af..3ae059861a1 100644 --- a/hardware/opentrons_hardware/sensors/__init__.py +++ b/hardware/opentrons_hardware/sensors/__init__.py @@ -1 +1,3 @@ """Sub-module for sensor drivers.""" + +SENSOR_LOG_NAME = "pipettes-sensor-log" diff --git a/hardware/opentrons_hardware/sensors/sensor_driver.py b/hardware/opentrons_hardware/sensors/sensor_driver.py index 611bc091970..0f1904f8a26 100644 --- a/hardware/opentrons_hardware/sensors/sensor_driver.py +++ b/hardware/opentrons_hardware/sensors/sensor_driver.py @@ -1,9 +1,8 @@ """Capacitve Sensor Driver Class.""" import time import asyncio -import csv -from typing import Optional, AsyncIterator, Any, Sequence +from typing import Optional, AsyncIterator, Any, Sequence, List, Union from contextlib import asynccontextmanager, suppress from logging import getLogger @@ -19,7 +18,6 @@ from opentrons_hardware.firmware_bindings.constants import ( SensorOutputBinding, SensorThresholdMode, - NodeId, ) from opentrons_hardware.sensors.types import ( SensorDataType, @@ -32,7 +30,12 @@ SensorThresholdInformation, ) -from opentrons_hardware.sensors.sensor_types import BaseSensorType, ThresholdSensorType +from opentrons_hardware.sensors.sensor_types import ( + BaseSensorType, + ThresholdSensorType, + PressureSensor, + CapacitiveSensor, +) from opentrons_hardware.firmware_bindings.messages.payloads import ( BindSensorOutputRequestPayload, ) @@ -46,8 +49,10 @@ ) from .sensor_abc import AbstractSensorDriver from .scheduler import SensorScheduler +from . import SENSOR_LOG_NAME LOG = getLogger(__name__) +SENSOR_LOG = getLogger(SENSOR_LOG_NAME) class SensorDriver(AbstractSensorDriver): @@ -226,43 +231,50 @@ class LogListener: def __init__( self, - mount: NodeId, - data_file: Any, - file_heading: Sequence[str], - sensor_metadata: Sequence[Any], + messenger: CanMessenger, + sensor: Union[PressureSensor, CapacitiveSensor], ) -> None: """Build the capturer.""" - self.csv_writer = Any - self.data_file = data_file - self.file_heading = file_heading - self.sensor_metadata = sensor_metadata - self.response_queue: asyncio.Queue[float] = asyncio.Queue() - self.mount = mount + self.response_queue: asyncio.Queue[SensorDataType] = asyncio.Queue() + self.tool = sensor.sensor.node_id self.start_time = 0.0 self.event: Any = None + self.messenger = messenger + self.sensor = sensor + self.type = sensor.sensor.sensor_type + self.id = sensor.sensor.sensor_id - async def __aenter__(self) -> None: - """Create a csv heading for logging pressure readings.""" - self.data_file = open(self.data_file, "w") - self.csv_writer = csv.writer(self.data_file) - self.csv_writer.writerows([self.file_heading, self.sensor_metadata]) + def get_data(self) -> Optional[List[SensorDataType]]: + """Return the sensor data captured by this listener.""" + if self.response_queue.empty(): + return None + data: List[SensorDataType] = [] + while not self.response_queue.empty(): + data.append(self.response_queue.get_nowait()) + return data + async def __aenter__(self) -> None: + """Start logging sensor readings.""" + self.messenger.add_listener(self, None) self.start_time = time.time() + SENSOR_LOG.info(f"Data capture for {self.tool.name} started {self.start_time}") async def __aexit__(self, *args: Any) -> None: - """Close csv file.""" - self.data_file.close() + """Finish the capture.""" + self.messenger.remove_listener(self) + SENSOR_LOG.info(f"Data capture for {self.tool.name} ended {time.time()}") - async def wait_for_complete( - self, wait_time: float = 10, 
message_index: int = 0 - ) -> None: - """Wait for the data to stop, only use this with a send_accumulated_data_request.""" + def set_stop_ack(self, message_index: int = 0) -> None: + """Tell the Listener which message index to wait for.""" self.event = asyncio.Event() self.expected_ack = message_index + + async def wait_for_complete(self, wait_time: float = 10) -> None: + """Wait for the data to stop.""" with suppress(asyncio.TimeoutError): await asyncio.wait_for(self.event.wait(), wait_time) if not self.event.is_set(): - LOG.error("Did not receive the full data set from the sensor") + SENSOR_LOG.error("Did not receive the full data set from the sensor") self.event = None def __call__( @@ -271,30 +283,44 @@ def __call__( arbitration_id: ArbitrationId, ) -> None: """Callback entry point for capturing messages.""" + if arbitration_id.parts.originating_node_id != self.tool: + # check that this is from the node we care about + return if isinstance(message, message_definitions.ReadFromSensorResponse): + if ( + message.payload.sensor_id.value is not self.id + or message.payload.sensor is not self.type + ): + # ignore sensor responses from other sensors + return data = sensor_types.SensorDataType.build( message.payload.sensor_data, message.payload.sensor - ).to_float() + ) self.response_queue.put_nowait(data) - current_time = round((time.time() - self.start_time), 3) - self.csv_writer.writerow([current_time, data]) # type: ignore + SENSOR_LOG.info( + f"Revieved from {arbitration_id}: {message.payload.sensor_id}:{message.payload.sensor}: {data}" + ) if isinstance(message, message_definitions.BatchReadFromSensorResponse): data_length = message.payload.data_length.value data_bytes = message.payload.sensor_data.value data_ints = [ - int.from_bytes(data_bytes[i * 4 : i * 4 + 4]) + int.from_bytes(data_bytes[i * 4 : i * 4 + 4], byteorder="little") for i in range(data_length) ] - for d in data_ints: - data = sensor_types.SensorDataType.build( - d, message.payload.sensor - ).to_float() - self.response_queue.put_nowait(data) - current_time = round((time.time() - self.start_time), 3) - self.csv_writer.writerow([current_time, data]) + data_floats = [ + sensor_types.SensorDataType.build(d, message.payload.sensor) + for d in data_ints + ] + + for d in data_floats: + self.response_queue.put_nowait(d) + SENSOR_LOG.info( + f"Revieved from {arbitration_id}: {message.payload.sensor_id}:{message.payload.sensor}: {data_floats}" + ) if isinstance(message, message_definitions.Acknowledgement): if ( self.event is not None and message.payload.message_index.value == self.expected_ack ): + SENSOR_LOG.info("Finished receiving sensor data") self.event.set() diff --git a/hardware/tests/opentrons_hardware/hardware_control/test_tool_sensors.py b/hardware/tests/opentrons_hardware/hardware_control/test_tool_sensors.py index 2dc7614da63..0c53b81057a 100644 --- a/hardware/tests/opentrons_hardware/hardware_control/test_tool_sensors.py +++ b/hardware/tests/opentrons_hardware/hardware_control/test_tool_sensors.py @@ -1,12 +1,10 @@ """Test the tool-sensor coordination code.""" import logging from mock import patch, AsyncMock, call -import os import pytest from contextlib import asynccontextmanager from typing import Iterator, List, Tuple, AsyncIterator, Any, Dict, Callable from opentrons_hardware.firmware_bindings.messages.message_definitions import ( - AddLinearMoveRequest, ExecuteMoveGroupRequest, MoveCompleted, ReadFromSensorResponse, @@ -50,7 +48,6 @@ SensorType, SensorThresholdMode, SensorOutputBinding, - MoveStopCondition, ) from 
opentrons_hardware.sensors.scheduler import SensorScheduler from opentrons_hardware.sensors.sensor_driver import SensorDriver @@ -187,78 +184,7 @@ def check_second_move( ), ] - def get_responder() -> Iterator[ - Callable[ - [NodeId, MessageDefinition], List[Tuple[NodeId, MessageDefinition, NodeId]] - ] - ]: - yield check_first_move - yield check_second_move - - responder_getter = get_responder() - - def move_responder( - node_id: NodeId, message: MessageDefinition - ) -> List[Tuple[NodeId, MessageDefinition, NodeId]]: - message.payload.serialize() - if isinstance(message, ExecuteMoveGroupRequest): - responder = next(responder_getter) - return responder(node_id, message) - else: - return [] - - message_send_loopback.add_responder(move_responder) - - position = await liquid_probe( - messenger=mock_messenger, - tool=target_node, - head_node=motor_node, - max_p_distance=70, - mount_speed=10, - plunger_speed=8, - threshold_pascals=threshold_pascals, - plunger_impulse_time=0.2, - num_baseline_reads=20, - csv_output=False, - sync_buffer_output=False, - can_bus_only_output=False, - sensor_id=SensorId.S0, - ) - assert position[motor_node].positions_only()[0] == 14 - assert mock_sensor_threshold.call_args_list[0][0][0] == SensorThresholdInformation( - sensor=sensor_info, - data=SensorDataType.build(threshold_pascals * 65536, sensor_info.sensor_type), - mode=SensorThresholdMode.absolute, - ) - - -@pytest.mark.parametrize( - "csv_output, sync_buffer_output, can_bus_only_output, move_stop_condition", - [ - (True, False, False, MoveStopCondition.sync_line), - (True, True, False, MoveStopCondition.sensor_report), - (False, False, True, MoveStopCondition.sync_line), - ], -) -async def test_liquid_probe_output_options( - mock_messenger: AsyncMock, - mock_bind_output: AsyncMock, - message_send_loopback: CanLoopback, - mock_sensor_threshold: AsyncMock, - csv_output: bool, - sync_buffer_output: bool, - can_bus_only_output: bool, - move_stop_condition: MoveStopCondition, -) -> None: - """Test that liquid_probe targets the right nodes.""" - sensor_info = SensorInformation( - sensor_type=SensorType.pressure, - sensor_id=SensorId.S0, - node_id=NodeId.pipette_left, - ) - test_csv_file: str = os.path.join(os.getcwd(), "test.csv") - - def check_first_move( + def check_third_move( node_id: NodeId, message: MessageDefinition ) -> List[Tuple[NodeId, MessageDefinition, NodeId]]: return [ @@ -274,44 +200,10 @@ def check_first_move( ack_id=UInt8Field(1), ) ), - NodeId.pipette_left, + motor_node, ) ] - def check_second_move( - node_id: NodeId, message: MessageDefinition - ) -> List[Tuple[NodeId, MessageDefinition, NodeId]]: - return [ - ( - NodeId.host, - MoveCompleted( - payload=MoveCompletedPayload( - group_id=UInt8Field(1), - seq_id=UInt8Field(0), - current_position_um=UInt32Field(14000), - encoder_position_um=Int32Field(14000), - position_flags=MotorPositionFlagsField(0), - ack_id=UInt8Field(2), - ) - ), - NodeId.head_l, - ), - ( - NodeId.host, - MoveCompleted( - payload=MoveCompletedPayload( - group_id=UInt8Field(1), - seq_id=UInt8Field(0), - current_position_um=UInt32Field(14000), - encoder_position_um=Int32Field(14000), - position_flags=MotorPositionFlagsField(0), - ack_id=UInt8Field(2), - ) - ), - NodeId.pipette_left, - ), - ] - def get_responder() -> Iterator[ Callable[ [NodeId, MessageDefinition], List[Tuple[NodeId, MessageDefinition, NodeId]] @@ -319,6 +211,7 @@ def get_responder() -> Iterator[ ]: yield check_first_move yield check_second_move + yield check_third_move responder_getter = get_responder() @@ 
-330,42 +223,26 @@ def move_responder( responder = next(responder_getter) return responder(node_id, message) else: - if ( - isinstance(message, AddLinearMoveRequest) - and node_id == NodeId.pipette_left - and message.payload.group_id == 2 - ): - assert ( - message.payload.request_stop_condition.value == move_stop_condition - ) return [] message_send_loopback.add_responder(move_responder) - try: - position = await liquid_probe( - messenger=mock_messenger, - tool=NodeId.pipette_left, - head_node=NodeId.head_l, - max_p_distance=70, - mount_speed=10, - plunger_speed=8, - threshold_pascals=14, - plunger_impulse_time=0.2, - num_baseline_reads=20, - csv_output=csv_output, - sync_buffer_output=sync_buffer_output, - can_bus_only_output=can_bus_only_output, - data_files={SensorId.S0: test_csv_file}, - sensor_id=SensorId.S0, - ) - finally: - if os.path.isfile(test_csv_file): - # clean up the test file this creates if it exists - os.remove(test_csv_file) - assert position[NodeId.head_l].positions_only()[0] == 14 + + position = await liquid_probe( + messenger=mock_messenger, + tool=target_node, + head_node=motor_node, + max_p_distance=70, + mount_speed=10, + plunger_speed=8, + threshold_pascals=threshold_pascals, + plunger_impulse_time=0.2, + num_baseline_reads=20, + sensor_id=SensorId.S0, + ) + assert position[motor_node].positions_only()[0] == 14 assert mock_sensor_threshold.call_args_list[0][0][0] == SensorThresholdInformation( sensor=sensor_info, - data=SensorDataType.build(14 * 65536, sensor_info.sensor_type), + data=SensorDataType.build(threshold_pascals * 65536, sensor_info.sensor_type), mode=SensorThresholdMode.absolute, ) diff --git a/opentrons-ai-client/src/assets/localization/en/protocol_generator.json b/opentrons-ai-client/src/assets/localization/en/protocol_generator.json index f44eff34e73..2088e495482 100644 --- a/opentrons-ai-client/src/assets/localization/en/protocol_generator.json +++ b/opentrons-ai-client/src/assets/localization/en/protocol_generator.json @@ -1,4 +1,5 @@ { + "ai": "AI", "api": "API: An API level is 2.15", "application": "Application: Your protocol's name, describing what it does.", "commands": "Commands: List the protocol's steps, specifying quantities in microliters (uL) and giving exact source and destination locations.", @@ -15,6 +16,7 @@ "make_sure_your_prompt": "Write a prompt in a natural language for OpentronsAI to generate a protocol using the Opentrons Python Protocol API v2. The better the prompt, the better the quality of the protocol produced by OpentronsAI.", "modules_and_adapters": "Modules and adapters: Specify the modules and labware adapters required by your protocol.", "notes": "A few important things to note:", + "opentrons": "Opentrons", "opentronsai": "OpentronsAI", "ot2_pipettes": "OT-2 pipettes: Include volume, number of channels, and generation.", "pcr_flex": "PCR (Flex)", @@ -34,5 +36,7 @@ "well_allocations": "Well allocations: Describe where liquids should go in labware.", "what_if_you": "What if you don’t provide all of those pieces of information? OpentronsAI asks you to provide it!", "what_typeof_protocol": "What type of protocol do you need?", - "you": "You" + "you": "You", + "prompt_preview_submit_button": "Submit prompt", + "prompt_preview_placeholder_message": "As you complete the sections on the left, your prompt will be built here. When all requirements are met you will be able to generate the protocol." 
} diff --git a/opentrons-ai-client/src/molecules/Accordion/Accordion.stories.tsx b/opentrons-ai-client/src/molecules/Accordion/Accordion.stories.tsx new file mode 100644 index 00000000000..388267061b0 --- /dev/null +++ b/opentrons-ai-client/src/molecules/Accordion/Accordion.stories.tsx @@ -0,0 +1,74 @@ +import { I18nextProvider } from 'react-i18next' +import { COLORS, Flex, SPACING } from '@opentrons/components' +import { i18n } from '../../i18n' +import { Accordion } from './index' + +import type { Meta, StoryObj } from '@storybook/react' + +const contentExample: React.ReactNode = ( +
+

What's your scientific application?

+

Describe what you are trying to do

+

+ Example: “The protocol performs automated liquid handling for Pierce BCA + Protein Assay Kit to determine protein concentrations in various sample + types, such as cell lysates and eluates of purification process.”

+
+) + +const meta: Meta = { + title: 'AI/molecules/Accordion', + component: Accordion, + decorators: [ + Story => ( + + + + + + ), + ], +} +export default meta +type Story = StoryObj + +export const AccordionCollapsed: Story = { + args: { + id: 'accordion', + handleClick: () => { + alert('Accordion clicked') + }, + heading: 'Application', + children: contentExample, + }, +} + +export const AccordionCompleted: Story = { + args: { + id: 'accordion', + isCompleted: true, + heading: 'Application', + }, +} + +export const AccordionExpanded: Story = { + args: { + id: 'accordion2', + isOpen: true, + heading: 'Application', + children: contentExample, + }, +} + +export const AccordionDisabled: Story = { + args: { + id: 'accordion3', + handleClick: () => { + alert('Accordion clicked') + }, + disabled: true, + heading: 'Application', + children: contentExample, + }, +} diff --git a/opentrons-ai-client/src/molecules/Accordion/__tests__/Accordion.test.tsx b/opentrons-ai-client/src/molecules/Accordion/__tests__/Accordion.test.tsx new file mode 100644 index 00000000000..4be089d8398 --- /dev/null +++ b/opentrons-ai-client/src/molecules/Accordion/__tests__/Accordion.test.tsx @@ -0,0 +1,68 @@ +import type * as React from 'react' +import { describe, it, vi, beforeEach, expect } from 'vitest' +import { fireEvent, screen } from '@testing-library/react' +import { renderWithProviders } from '../../../__testing-utils__' + +import { Accordion } from '../index' + +const mockHandleClick = vi.fn() +const render = (props: React.ComponentProps) => { + return renderWithProviders() +} + +describe('Accordion', () => { + let props: React.ComponentProps + + beforeEach(() => { + props = { + id: 'accordion-test', + handleClick: mockHandleClick, + isOpen: false, + isCompleted: false, + heading: 'Accordion heading', + children:
<div>Accordion content</div>
, + } + }) + + it('should render an accordion with heading', () => { + render(props) + const accordion = screen.getByRole('button', { name: 'Accordion heading' }) + expect(accordion).toBeInTheDocument() + }) + + it('should display content if isOpen is true', () => { + props.isOpen = true + render(props) + const accordionContent = screen.getByText('Accordion content') + expect(accordionContent).toBeVisible() + }) + + it('should not display content if isOpen is false', () => { + render(props) + const accordionContent = screen.queryByText('Accordion content') + expect(accordionContent).not.toBeVisible() + }) + + it("should call handleClick when the accordion's header is clicked", () => { + render(props) + const accordionHeader = screen.getByRole('button', { + name: 'Accordion heading', + }) + fireEvent.click(accordionHeader) + expect(mockHandleClick).toHaveBeenCalled() + }) + + it('should display a check icon if isCompleted is true', () => { + props.isCompleted = true + render(props) + const checkIcon = screen.getByTestId('accordion-test-ot-check') + expect(checkIcon).toBeInTheDocument() + }) + + it('should not display a check icon if isCompleted is false', () => { + props.isCompleted = false + render(props) + const checkIcon = screen.queryByTestId('accordion-test-ot-check') + expect(checkIcon).not.toBeInTheDocument() + }) +}) diff --git a/opentrons-ai-client/src/molecules/Accordion/index.tsx b/opentrons-ai-client/src/molecules/Accordion/index.tsx new file mode 100644 index 00000000000..885f6af1745 --- /dev/null +++ b/opentrons-ai-client/src/molecules/Accordion/index.tsx @@ -0,0 +1,158 @@ +import { useRef, useState, useEffect } from 'react' +import styled from 'styled-components' +import { + Flex, + Icon, + StyledText, + COLORS, + BORDERS, + DIRECTION_COLUMN, + SIZE_AUTO, + SPACING, + JUSTIFY_SPACE_BETWEEN, + ALIGN_CENTER, + CURSOR_POINTER, + TEXT_ALIGN_LEFT, + DISPLAY_FLEX, + OVERFLOW_HIDDEN, + CURSOR_DEFAULT, +} from '@opentrons/components' + +interface AccordionProps { + id?: string + handleClick: () => void + heading: string + isOpen?: boolean + isCompleted?: boolean + disabled?: boolean + children: React.ReactNode +} + +const ACCORDION = 'accordion' +const BUTTON = 'button' +const CONTENT = 'content' +const OT_CHECK = 'ot-check' + +const AccordionContainer = styled(Flex)<{ + isOpen: boolean + disabled: boolean +}>` + flex-direction: ${DIRECTION_COLUMN}; + width: 100%; + height: ${SIZE_AUTO}; + padding: ${SPACING.spacing24} ${SPACING.spacing32}; + border-radius: ${BORDERS.borderRadius16}; + background-color: ${COLORS.white}; + cursor: ${props => + props.isOpen || props.disabled ? `${CURSOR_DEFAULT}` : `${CURSOR_POINTER}`}; +` + +const AccordionButton = styled.button<{ isOpen: boolean; disabled: boolean }>` + display: ${DISPLAY_FLEX}; + justify-content: ${JUSTIFY_SPACE_BETWEEN}; + align-items: ${ALIGN_CENTER}; + width: 100%; + background: none; + border: none; + cursor: ${props => + props.isOpen || props.disabled ? `${CURSOR_DEFAULT}` : `${CURSOR_POINTER}`}; + text-align: ${TEXT_ALIGN_LEFT}; + + &:focus-visible { + outline: 2px solid ${COLORS.blue50}; + } +` + +const HeadingText = styled(StyledText)` + flex: 1; + margin-right: ${SPACING.spacing8}; +` + +const AccordionContent = styled.div<{ + id: string + isOpen: boolean + contentHeight: number +}>` + transition: height 0.3s ease, margin-top 0.3s ease, visibility 0.3s ease; + overflow: ${OVERFLOW_HIDDEN}; + height: ${props => (props.isOpen ? `${props.contentHeight}px` : '0')}; + margin-top: ${props => (props.isOpen ? 
`${SPACING.spacing16}` : '0')}; + pointer-events: ${props => (props.isOpen ? 'auto' : 'none')}; + visibility: ${props => (props.isOpen ? 'unset' : 'hidden')}; +` + +export function Accordion({ + id = ACCORDION, + handleClick, + isOpen = false, + isCompleted = false, + disabled = false, + heading = '', + children, +}: AccordionProps): JSX.Element { + const contentRef = useRef(null) + const [contentHeight, setContentHeight] = useState(0) + + useEffect(() => { + if (contentRef.current != null) { + setContentHeight(contentRef.current.scrollHeight) + } + }, [isOpen]) + + const handleContainerClick = (e: React.MouseEvent): void => { + // Prevent the click event from propagating to the button + if ( + (e.target as HTMLElement).tagName !== 'BUTTON' && + !disabled && + !isOpen + ) { + handleClick() + } + } + + const handleButtonClick = (e: React.MouseEvent): void => { + // Stop the event from propagating to the container + if (!isOpen && !disabled) { + e.stopPropagation() + handleClick() + } + } + + return ( + + + {heading} + {isCompleted && ( + + )} + + + {children} + + + ) +} diff --git a/opentrons-ai-client/src/molecules/Header/Header.stories.tsx b/opentrons-ai-client/src/molecules/Header/Header.stories.tsx new file mode 100644 index 00000000000..d451ee2d355 --- /dev/null +++ b/opentrons-ai-client/src/molecules/Header/Header.stories.tsx @@ -0,0 +1,20 @@ +import type { Meta, StoryObj } from '@storybook/react' +import { Header as HeaderComponent } from '.' +import { COLORS, Flex, SPACING } from '@opentrons/components' + +const meta: Meta = { + title: 'AI/Molecules/Header', + component: HeaderComponent, + decorators: [ + Story => ( + + + + ), + ], +} +export default meta + +type Story = StoryObj + +export const ChatHeaderExample: Story = {} diff --git a/opentrons-ai-client/src/molecules/Header/__tests__/Header.test.tsx b/opentrons-ai-client/src/molecules/Header/__tests__/Header.test.tsx new file mode 100644 index 00000000000..31f3b01e629 --- /dev/null +++ b/opentrons-ai-client/src/molecules/Header/__tests__/Header.test.tsx @@ -0,0 +1,23 @@ +import { renderWithProviders } from '../../../__testing-utils__' +import { i18n } from '../../../i18n' +import { Header } from '../index' +import { describe, it } from 'vitest' +import { screen } from '@testing-library/react' + +const render = (): ReturnType => { + return renderWithProviders(
, { + i18nInstance: i18n, + }) +} + +describe('Header', () => { + it('should render Header component', () => { + render() + screen.getByText('Opentrons') + }) + + it('should render log out button', () => { + render() + screen.getByText('Logout') + }) +}) diff --git a/opentrons-ai-client/src/molecules/Header/index.tsx b/opentrons-ai-client/src/molecules/Header/index.tsx new file mode 100644 index 00000000000..e909aeaf691 --- /dev/null +++ b/opentrons-ai-client/src/molecules/Header/index.tsx @@ -0,0 +1,63 @@ +import { useTranslation } from 'react-i18next' +import styled from 'styled-components' + +import { + Flex, + StyledText, + Link as LinkButton, + POSITION_ABSOLUTE, + TYPOGRAPHY, + COLORS, + POSITION_RELATIVE, + ALIGN_CENTER, + JUSTIFY_SPACE_BETWEEN, +} from '@opentrons/components' +import { useAuth0 } from '@auth0/auth0-react' + +const HeaderBar = styled(Flex)` + position: ${POSITION_RELATIVE}; + background-color: ${COLORS.white}; + width: 100%; + align-items: ${ALIGN_CENTER}; + height: 60px; +` + +const HeaderBarContent = styled(Flex)` + position: ${POSITION_ABSOLUTE}; + padding: 18px 32px; + justify-content: ${JUSTIFY_SPACE_BETWEEN}; + width: 100%; +` + +const HeaderGradientTitle = styled(StyledText)` + background: linear-gradient(90deg, #562566 0%, #893ba4 47.5%, #c189d4 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + font-size: 16px; +` + +const HeaderTitle = styled(StyledText)` + font-size: 16px; +` + +const LogoutButton = styled(LinkButton)` + color: ${COLORS.grey50}; + font-size: ${TYPOGRAPHY.fontSizeH3}; +` + +export function Header(): JSX.Element { + const { t } = useTranslation('protocol_generator') + const { logout } = useAuth0() + + return ( + + + + {t('opentrons')} + {t('ai')} + + logout()}>{t('logout')} + + + ) +} diff --git a/opentrons-ai-client/src/molecules/HeaderWithMeter/HeaderWithMeter.stories.tsx b/opentrons-ai-client/src/molecules/HeaderWithMeter/HeaderWithMeter.stories.tsx new file mode 100644 index 00000000000..80608117379 --- /dev/null +++ b/opentrons-ai-client/src/molecules/HeaderWithMeter/HeaderWithMeter.stories.tsx @@ -0,0 +1,20 @@ +import type { Meta, StoryObj } from '@storybook/react' +import { HeaderWithMeter as HeaderWithMeterComponent } from '.' 
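HeaderWithMeter, added just below, layers the shared Header over a native progress element and exposes a single progressPercentage prop in the 0 to 1 range (its tests in this diff drive it with 0, 0.2, 0.3, 0.6, and 1). A minimal usage sketch; the import path and the 0.3 value are illustrative only:

import { HeaderWithMeter } from './molecules/HeaderWithMeter'

export function ExampleHeader(): JSX.Element {
  // 0.3 stands in for however far the user has progressed through the flow
  return <HeaderWithMeter progressPercentage={0.3} />
}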
+import { COLORS, Flex, SPACING } from '@opentrons/components' + +const meta: Meta = { + title: 'AI/Molecules/HeaderWithMeter', + component: HeaderWithMeterComponent, + decorators: [ + Story => ( + + + + ), + ], +} +export default meta + +type Story = StoryObj + +export const HeaderWithMeterExample: Story = {} diff --git a/opentrons-ai-client/src/molecules/HeaderWithMeter/__tests__/HeaderWithMeter.test.tsx b/opentrons-ai-client/src/molecules/HeaderWithMeter/__tests__/HeaderWithMeter.test.tsx new file mode 100644 index 00000000000..8d02aeb3e12 --- /dev/null +++ b/opentrons-ai-client/src/molecules/HeaderWithMeter/__tests__/HeaderWithMeter.test.tsx @@ -0,0 +1,51 @@ +import { renderWithProviders } from '../../../__testing-utils__' +import { i18n } from '../../../i18n' +import { HeaderWithMeter } from '../index' +import { describe, expect, it } from 'vitest' +import { screen, render as rtlRender } from '@testing-library/react' + +const render = (): ReturnType => { + return renderWithProviders(, { + i18nInstance: i18n, + }) +} + +describe('HeaderWithMeter', () => { + it('should render Header component', () => { + render() + screen.getByText('Opentrons') + }) + + it('should render progress bar', () => { + render() + screen.getByRole('progressbar') + }) + + it('should render progress bar with correct value', () => { + render() + const progressBar = screen.getByRole('progressbar') + expect(progressBar).toHaveAttribute('value', '0.3') + }) + + it('should update when progressPercentage prop changes', () => { + const { rerender } = rtlRender( + , + {} + ) + + const progressBar = screen.getByRole('progressbar') + expect(progressBar).toHaveAttribute('value', '0.3') + + rerender() + expect(progressBar).toHaveAttribute('value', '0.6') + + rerender() + expect(progressBar).toHaveAttribute('value', '1') + + rerender() + expect(progressBar).toHaveAttribute('value', '0') + + rerender() + expect(progressBar).toHaveAttribute('value', '0.2') + }) +}) diff --git a/opentrons-ai-client/src/molecules/HeaderWithMeter/index.tsx b/opentrons-ai-client/src/molecules/HeaderWithMeter/index.tsx new file mode 100644 index 00000000000..24bc1a89805 --- /dev/null +++ b/opentrons-ai-client/src/molecules/HeaderWithMeter/index.tsx @@ -0,0 +1,50 @@ +import { + Flex, + DIRECTION_COLUMN, + JUSTIFY_SPACE_BETWEEN, + COLORS, +} from '@opentrons/components' +import { Header } from '../Header' +import styled from 'styled-components' + +const SquareProgressBar = styled.progress` + width: 100%; + height: 4px; + border-radius: 0; + appearance: none; + + &::-webkit-progress-bar { + background-color: ${COLORS.grey30}; /* Background color of the progress bar */ + border-radius: 0; + } + + &::-webkit-progress-value { + background-color: ${COLORS.blue50}; /* Color of the progress value */ + border-radius: 0; + transition: width 1s; + } + + &::-moz-progress-bar { + background-color: ${COLORS.blue50}; /* Color of the progress value for Firefox */ + border-radius: 0; + } +` + +export interface ChatHeaderProps { + progressPercentage: number +} + +export function HeaderWithMeter({ + progressPercentage = 0.5, +}: ChatHeaderProps): JSX.Element { + return ( + +
+ + + ) +} diff --git a/opentrons-ai-client/src/molecules/PromptPreview/PromptPreview.stories.tsx b/opentrons-ai-client/src/molecules/PromptPreview/PromptPreview.stories.tsx new file mode 100644 index 00000000000..79e7b822dcc --- /dev/null +++ b/opentrons-ai-client/src/molecules/PromptPreview/PromptPreview.stories.tsx @@ -0,0 +1,84 @@ +import { I18nextProvider } from 'react-i18next' +import { COLORS, Flex, SPACING } from '@opentrons/components' +import { i18n } from '../../i18n' +import type { Meta, StoryObj } from '@storybook/react' +import { PromptPreview } from '.' + +const meta: Meta = { + title: 'AI/molecules/PromptPreview', + component: PromptPreview, + decorators: [ + Story => ( + + + + + + ), + ], +} +export default meta +type Story = StoryObj + +export const PromptPreviewExample: Story = { + args: { + isSubmitButtonEnabled: false, + handleSubmit: () => { + alert('Submit button clicked') + }, + promptPreviewData: [ + { + title: 'Application', + items: [ + 'Cherrypicking', + 'I have a Chlorine Reagent Set (Total), Ultra Low Range', + ], + }, + { + title: 'Instruments', + items: [ + 'Opentrons Flex', + 'Flex 1-Channel 50 uL', + 'Flex 8-Channel 1000 uL', + ], + }, + { + title: 'Modules', + items: [ + 'Thermocycler GEN2', + 'Heater-Shaker with Universal Flat Adaptor', + ], + }, + { + title: 'Labware and Liquids', + items: [ + 'Opentrons 96 Well Plate', + 'Thermocycler GEN2', + 'Opentrons 96 Deep Well Plate', + 'Liquid 1: In commodo lectus nec erat commodo blandit. Etiam leo dui, porttitor vel imperdiet sed, tristique nec nisl. Maecenas pulvinar sapien quis sodales imperdiet.', + 'Liquid 2: Lorem ipsum dolor sit amet, consectetur adipiscing elit.', + ], + }, + { + title: 'Steps', + items: [ + 'Fill the first column of a Elisa plate with 100 uL of Liquid 1', + 'Fill the second column of a Elisa plate with 100 uL of Liquid 2', + ], + }, + ], + }, +} + +export const PromptPreviewPlaceholderMessage: Story = { + args: { + isSubmitButtonEnabled: false, + handleSubmit: () => { + alert('Submit button clicked') + }, + }, +} diff --git a/opentrons-ai-client/src/molecules/PromptPreview/__tests__/PromptPreview.test.tsx b/opentrons-ai-client/src/molecules/PromptPreview/__tests__/PromptPreview.test.tsx new file mode 100644 index 00000000000..ab7d69543ba --- /dev/null +++ b/opentrons-ai-client/src/molecules/PromptPreview/__tests__/PromptPreview.test.tsx @@ -0,0 +1,109 @@ +import { screen } from '@testing-library/react' +import { describe, it, vi, beforeEach, expect } from 'vitest' +import { renderWithProviders } from '../../../__testing-utils__' +import { i18n } from '../../../i18n' +import { PromptPreview } from '..' + +const PROMPT_PREVIEW_PLACEHOLDER_MESSAGE = + 'As you complete the sections on the left, your prompt will be built here. When all requirements are met you will be able to generate the protocol.' 
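PromptPreview, whose tests begin here, renders a Prompt heading, a Submit prompt button, and one PromptPreviewSection per entry of promptPreviewData, falling back to the placeholder copy above when every section's items array is empty. A sketch of the props it expects, with section contents borrowed from the storybook example and the enabling condition only a suggestion:

import { PromptPreview } from './molecules/PromptPreview'
import type { PromptPreviewSectionProps } from './molecules/PromptPreviewSection'

const promptPreviewData: PromptPreviewSectionProps[] = [
  { title: 'Application', items: ['Cherrypicking'] },
  { title: 'Instruments', items: ['Opentrons Flex', 'Flex 1-Channel 50 uL'] },
]

export function ExamplePrompt(): JSX.Element {
  return (
    <PromptPreview
      // enable submission once at least one section has content
      isSubmitButtonEnabled={promptPreviewData.some(s => s.items.length > 0)}
      handleSubmit={() => {
        console.log('submit the built prompt')
      }}
      promptPreviewData={promptPreviewData}
    />
  )
}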
+ +const mockHandleClick = vi.fn() + +const render = (props: React.ComponentProps) => { + return renderWithProviders(, { + i18nInstance: i18n, + }) +} + +describe('PromptPreview', () => { + let props: React.ComponentProps + + beforeEach(() => { + props = { + isSubmitButtonEnabled: false, + handleSubmit: () => { + mockHandleClick() + }, + promptPreviewData: [ + { + title: 'Test Section 1', + items: ['item1', 'item2'], + }, + { + title: 'Test Section 2', + items: ['item3', 'item4'], + }, + ], + } + }) + + it('should render the PromptPreview component', () => { + render(props) + + expect(screen.getByText('Prompt')).toBeInTheDocument() + }) + + it('should render the submit button', () => { + render(props) + + expect(screen.getByText('Submit prompt')).toBeInTheDocument() + }) + + it('should render the placeholder message when all sections are empty', () => { + props.promptPreviewData = [ + { + title: 'Test Section 1', + items: [], + }, + { + title: 'Test Section 2', + items: [], + }, + ] + render(props) + + expect( + screen.getByText(PROMPT_PREVIEW_PLACEHOLDER_MESSAGE) + ).toBeInTheDocument() + }) + + it('should not render the placeholder message when at least one section has items', () => { + render(props) + + expect( + screen.queryByText(PROMPT_PREVIEW_PLACEHOLDER_MESSAGE) + ).not.toBeInTheDocument() + }) + + it('should render the sections with items', () => { + render(props) + + expect(screen.getByText('Test Section 1')).toBeInTheDocument() + expect(screen.getByText('Test Section 2')).toBeInTheDocument() + }) + + it('should display submit button disabled when isSubmitButtonEnabled is false', () => { + render(props) + + expect(screen.getByRole('button', { name: 'Submit prompt' })).toBeDisabled() + }) + + it('should display submit button enabled when isSubmitButtonEnabled is true', () => { + props.isSubmitButtonEnabled = true + render(props) + + expect( + screen.getByRole('button', { name: 'Submit prompt' }) + ).not.toBeDisabled() + }) + + it('should call handleSubmit when the submit button is clicked', () => { + props.isSubmitButtonEnabled = true + render(props) + + const submitButton = screen.getByRole('button', { name: 'Submit prompt' }) + submitButton.click() + + expect(mockHandleClick).toHaveBeenCalled() + }) +}) diff --git a/opentrons-ai-client/src/molecules/PromptPreview/index.tsx b/opentrons-ai-client/src/molecules/PromptPreview/index.tsx new file mode 100644 index 00000000000..b789cfbb4c7 --- /dev/null +++ b/opentrons-ai-client/src/molecules/PromptPreview/index.tsx @@ -0,0 +1,86 @@ +import styled from 'styled-components' +import { + Flex, + StyledText, + LargeButton, + COLORS, + JUSTIFY_SPACE_BETWEEN, + DIRECTION_COLUMN, + SIZE_AUTO, + DIRECTION_ROW, + ALIGN_CENTER, + SPACING, +} from '@opentrons/components' +import { PromptPreviewSection } from '../PromptPreviewSection' +import type { PromptPreviewSectionProps } from '../PromptPreviewSection' +import { useTranslation } from 'react-i18next' + +interface PromptPreviewProps { + isSubmitButtonEnabled?: boolean + handleSubmit: () => void + promptPreviewData: PromptPreviewSectionProps[] +} + +const PromptPreviewContainer = styled(Flex)` + flex-direction: ${DIRECTION_COLUMN}; + width: 100%; + height: ${SIZE_AUTO}; + padding-top: ${SPACING.spacing8}; + background-color: ${COLORS.transparent}; +` + +const PromptPreviewHeading = styled(Flex)` + flex-direction: ${DIRECTION_ROW}; + justify-content: ${JUSTIFY_SPACE_BETWEEN}; + align-items: ${ALIGN_CENTER}; + margin-bottom: ${SPACING.spacing16}; +` + +const PromptPreviewPlaceholderMessage = 
styled(StyledText)` + padding: 82px 73px; + color: ${COLORS.grey60}; + text-align: ${ALIGN_CENTER}; +` + +export function PromptPreview({ + isSubmitButtonEnabled = false, + handleSubmit, + promptPreviewData = [], +}: PromptPreviewProps): JSX.Element { + const { t } = useTranslation('protocol_generator') + + const areAllSectionsEmpty = (): boolean => { + return promptPreviewData.every(section => section.items.length === 0) + } + + return ( + + + Prompt + + + + {areAllSectionsEmpty() && ( + + {t('prompt_preview_placeholder_message')} + + )} + + {Object.values(promptPreviewData).map( + (section, index) => + section.items.length > 0 && ( + + ) + )} + + ) +} diff --git a/opentrons-ai-client/src/molecules/PromptPreviewSection/__tests__/PromptPreviewSection.test.tsx b/opentrons-ai-client/src/molecules/PromptPreviewSection/__tests__/PromptPreviewSection.test.tsx new file mode 100644 index 00000000000..e194bae5a8e --- /dev/null +++ b/opentrons-ai-client/src/molecules/PromptPreviewSection/__tests__/PromptPreviewSection.test.tsx @@ -0,0 +1,60 @@ +import type * as React from 'react' +import { screen } from '@testing-library/react' +import { describe, it, beforeEach, expect } from 'vitest' +import { renderWithProviders } from '../../../__testing-utils__' +import { i18n } from '../../../i18n' + +import { PromptPreviewSection } from '../index' + +const render = (props: React.ComponentProps) => { + return renderWithProviders(, { + i18nInstance: i18n, + }) +} + +describe('PromptPreviewSection', () => { + let props: React.ComponentProps + + beforeEach(() => { + props = { + title: 'Test Section', + items: ['test item 1', 'test item 2'], + } + }) + + it('should render the PromptPreviewSection component', () => { + render(props) + + expect(screen.getByText('Test Section')).toBeInTheDocument() + }) + + it('should render the section title', () => { + render(props) + + expect(screen.getByText('Test Section')).toBeInTheDocument() + }) + + it('should render the items', () => { + render(props) + + expect(screen.getByText('test item 1')).toBeInTheDocument() + expect(screen.getByText('test item 2')).toBeInTheDocument() + }) + + it("should not render the item tag if it's an empty string", () => { + props.items = ['test item 1', ''] + render(props) + + const items = screen.getAllByTestId('Tag_default') + expect(items).toHaveLength(1) + }) + + it('should render the item with the correct max item width', () => { + props.items = ['test item 1 long text long text long text long text'] + props.itemMaxWidth = '23%' + render(props) + + const item = screen.getByTestId('item-tag-wrapper-0') + expect(item).toHaveStyle({ maxWidth: '23%' }) + }) +}) diff --git a/opentrons-ai-client/src/molecules/PromptPreviewSection/index.tsx b/opentrons-ai-client/src/molecules/PromptPreviewSection/index.tsx new file mode 100644 index 00000000000..c781e0308d7 --- /dev/null +++ b/opentrons-ai-client/src/molecules/PromptPreviewSection/index.tsx @@ -0,0 +1,74 @@ +import styled from 'styled-components' +import { + Flex, + StyledText, + Tag, + DIRECTION_COLUMN, + WRAP, + SPACING, +} from '@opentrons/components' + +export interface PromptPreviewSectionProps { + title: string + items: string[] + itemMaxWidth?: string +} + +const PromptPreviewSectionContainer = styled(Flex)` + flex-direction: ${DIRECTION_COLUMN}; + margin-top: ${SPACING.spacing32}; +` + +const SectionHeading = styled(StyledText)` + margin-bottom: ${SPACING.spacing8}; +` + +const TagsContainer = styled(Flex)` + grid-gap: ${SPACING.spacing4}; + flex-wrap: ${WRAP}; + justify-content: 
flex-start; + width: 100%; +` + +const TagItemWrapper = styled.div<{ itemMaxWidth: string }>` + display: flex; + width: auto; + white-space: nowrap; + overflow: hidden; + max-width: ${props => props.itemMaxWidth}; + + & > div { + overflow: hidden; + + > p { + overflow: hidden; + text-overflow: ellipsis; + } + } +` + +export function PromptPreviewSection({ + title, + items, + itemMaxWidth = '35%', +}: PromptPreviewSectionProps): JSX.Element { + return ( + + {title} + + {items.map( + (item: string, index: number) => + item.trim() !== '' && ( + + + + ) + )} + + + ) +} diff --git a/protocol-designer/cypress/e2e/batchEdit.cy.js b/protocol-designer/cypress/e2e/batchEdit.cy.js index 300983ad9b0..8bd7d284287 100644 --- a/protocol-designer/cypress/e2e/batchEdit.cy.js +++ b/protocol-designer/cypress/e2e/batchEdit.cy.js @@ -76,7 +76,7 @@ describe('Batch Edit Transform', () => { // Delete the duplicated steps cy.get('#ClickableIcon_delete').click() - cy.get('button').contains('delete steps').click() + cy.get('button').contains('Delete steps').click() cy.get('#StepSelectionBannerComponent_numberStepsSelected') .contains('1 steps selected') .should('exist') diff --git a/protocol-designer/src/assets/localization/en/alert.json b/protocol-designer/src/assets/localization/en/alert.json index e07431bf188..b8f73cc290b 100644 --- a/protocol-designer/src/assets/localization/en/alert.json +++ b/protocol-designer/src/assets/localization/en/alert.json @@ -259,7 +259,11 @@ "no_commands": { "heading": "Your protocol has no steps", "body1": "This protocol has no steps in it- there's nothing for the robot to do! Before trying to run this on your robot add at least one step between your Starting Deck State and Final Deck State.", - "body2": "Learn more about building steps " + "body2": "Learn more about building steps ", + "redesign": { + "heading": "Protocol has no steps", + "body": "This protocol has no steps. Before trying to run this protocol on your robot, add at least one step." + } }, "unused_pipette_and_module": { "heading": "Unused pipette and module", diff --git a/protocol-designer/src/assets/localization/en/feature_flags.json b/protocol-designer/src/assets/localization/en/feature_flags.json index f83f09e345c..a70e23931c9 100644 --- a/protocol-designer/src/assets/localization/en/feature_flags.json +++ b/protocol-designer/src/assets/localization/en/feature_flags.json @@ -20,10 +20,6 @@ "title": "Enable redesign", "description": "A whole new world." }, - "OT_PD_ENABLE_MOAM": { - "title": "Enable multiple modules", - "description": "Enable multiple heater-shakers and magnetic blocks for Flex only." - }, "OT_PD_ENABLE_COMMENT": { "title": "Enable comment step", "description": "You can add comments anywhere between timeline steps." 
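Several of the protocol-designer changes further down in this diff drop the OT_PD_ENABLE_MOAM flag and hard-code the module types that may appear more than once on a Flex deck (see EditModules.tsx and ModulesAndOtherTile.tsx). A compact sketch of the resulting guard; the canMoam helper name is illustrative, while the constants and the Flex check are taken from those files:

import {
  FLEX_ROBOT_TYPE,
  HEATERSHAKER_MODULE_TYPE,
  MAGNETIC_BLOCK_TYPE,
  TEMPERATURE_MODULE_TYPE,
} from '@opentrons/shared-data'
import type { ModuleType, RobotType } from '@opentrons/shared-data'

const MOAM_MODULE_TYPES: ModuleType[] = [
  TEMPERATURE_MODULE_TYPE,
  HEATERSHAKER_MODULE_TYPE,
  MAGNETIC_BLOCK_TYPE,
]

// Multiple-of-a-module is only offered on Flex, and only for the types above.
function canMoam(robotType: RobotType, moduleType: ModuleType): boolean {
  return robotType === FLEX_ROBOT_TYPE && MOAM_MODULE_TYPES.includes(moduleType)
}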
diff --git a/protocol-designer/src/assets/localization/en/modal.json b/protocol-designer/src/assets/localization/en/modal.json index 1115b90a18b..ebf4e0d9b80 100644 --- a/protocol-designer/src/assets/localization/en/modal.json +++ b/protocol-designer/src/assets/localization/en/modal.json @@ -255,7 +255,14 @@ "body2": "Build a pause later if you want your protocol to proceed to the next steps while the temperature module ramps up to {{temperature}}°C.", "heater_shaker_pause_later": "Build a pause later if you want your protocol to proceed to the next steps while the Heater-Shaker module goes to {{temperature}}°C", "now_button": "Pause protocol now", - "later_button": "I will build a pause later" + "later_button": "I will build a pause later", + "redesign": { + "title": "Pause protocol until {{module}} is at {{temp}}˚C", + "body1": "Build a pause step to wait until {{module}} reaches {{temp}}˚C before continuing to the next step.", + "body2": "Build a pause step later if you want your protocol to proceed to the next step while the {{module}} goes to {{temp}}˚C", + "build_pause_later": "Build pause later", + "pause_protocol": "Pause protocol" + } }, "step_notes": { "title": "Step Notes" diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/HandleEnter.tsx b/protocol-designer/src/atoms/HandleEnter/index.tsx similarity index 87% rename from protocol-designer/src/pages/CreateNewProtocolWizard/HandleEnter.tsx rename to protocol-designer/src/atoms/HandleEnter/index.tsx index 5729f8ceb05..4fc99026ca7 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/HandleEnter.tsx +++ b/protocol-designer/src/atoms/HandleEnter/index.tsx @@ -1,8 +1,8 @@ -import type * as React from 'react' import { HandleKeypress } from '@opentrons/components' +import type { ReactNode } from 'react' interface HandleEnterProps { - children: React.ReactNode + children: ReactNode onEnter: () => void } diff --git a/protocol-designer/src/components/EditModules.tsx b/protocol-designer/src/components/EditModules.tsx index 6ed7c8a6054..31b392c29a5 100644 --- a/protocol-designer/src/components/EditModules.tsx +++ b/protocol-designer/src/components/EditModules.tsx @@ -12,7 +12,6 @@ import { } from '../step-forms' import { moveDeckItem } from '../labware-ingred/actions/actions' import { getRobotType } from '../file-data/selectors' -import { getEnableMoam } from '../feature-flags/selectors' import { EditMultipleModulesModal } from './modals/EditModulesModal/EditMultipleModulesModal' import { useBlockingHint } from './Hints/useBlockingHint' import { MagneticModuleWarningModalContent } from './modals/EditModulesModal/MagneticModuleWarningModalContent' @@ -34,14 +33,15 @@ export interface ModelModuleInfo { export const EditModules = (props: EditModulesProps): JSX.Element => { const { onCloseClick, moduleToEdit } = props - const enableMoam = useSelector(getEnableMoam) const { moduleId, moduleType } = moduleToEdit const _initialDeckSetup = useSelector(stepFormSelectors.getInitialDeckSetup) const robotType = useSelector(getRobotType) - const MOAM_MODULE_TYPES: ModuleType[] = enableMoam - ? 
[TEMPERATURE_MODULE_TYPE, HEATERSHAKER_MODULE_TYPE, MAGNETIC_BLOCK_TYPE] - : [TEMPERATURE_MODULE_TYPE] + const MOAM_MODULE_TYPES: ModuleType[] = [ + TEMPERATURE_MODULE_TYPE, + HEATERSHAKER_MODULE_TYPE, + MAGNETIC_BLOCK_TYPE, + ] const showMultipleModuleModal = robotType === FLEX_ROBOT_TYPE && MOAM_MODULE_TYPES.includes(moduleType) diff --git a/protocol-designer/src/components/StepEditForm/index.tsx b/protocol-designer/src/components/StepEditForm/index.tsx index 738d86a2ed8..8b54c56c891 100644 --- a/protocol-designer/src/components/StepEditForm/index.tsx +++ b/protocol-designer/src/components/StepEditForm/index.tsx @@ -2,6 +2,12 @@ import { useState } from 'react' import { useTranslation } from 'react-i18next' import { connect } from 'react-redux' import { useConditionalConfirm } from '@opentrons/components' +import { + getModuleDisplayName, + HEATERSHAKER_MODULE_TYPE, + TEMPERATURE_MODULE_TYPE, +} from '@opentrons/shared-data' + import { actions } from '../../steplist' import { actions as stepsActions } from '../../ui/steps' import { resetScrollElements } from '../../ui/steps/utils' @@ -12,7 +18,6 @@ import { import { maskField } from '../../steplist/fieldLevel' import { getInvariantContext } from '../../step-forms/selectors' import { AutoAddPauseUntilTempStepModal } from '../modals/AutoAddPauseUntilTempStepModal' -import { AutoAddPauseUntilHeaterShakerTempStepModal } from '../modals/AutoAddPauseUntilHeaterShakerTempStepModal' import { ConfirmDeleteModal, DELETE_STEP_FORM, @@ -166,20 +171,31 @@ const StepEditFormManager = ( onContinueClick={confirmClose} /> )} - {showAddPauseUntilTempStepModal && ( + {showAddPauseUntilTempStepModal || + showAddPauseUntilHeaterShakerTempStepModal ? ( - )} - {showAddPauseUntilHeaterShakerTempStepModal && ( - - )} + ) : null} { labware: {}, additionalEquipmentOnDeck: {}, }) - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(EditModulesModal).mockReturnValue(
<div>mock EditModulesModal</div>
) diff --git a/protocol-designer/src/components/modals/AutoAddPauseUntilHeaterShakerTempStepModal.tsx b/protocol-designer/src/components/modals/AutoAddPauseUntilHeaterShakerTempStepModal.tsx deleted file mode 100644 index c630f7be3e9..00000000000 --- a/protocol-designer/src/components/modals/AutoAddPauseUntilHeaterShakerTempStepModal.tsx +++ /dev/null @@ -1,57 +0,0 @@ -import { useTranslation } from 'react-i18next' -import { - AlertModal, - OutlineButton, - DeprecatedPrimaryButton, -} from '@opentrons/components' -import modalStyles from './modal.module.css' -import styles from './AutoAddPauseUntilTempStepModal.module.css' - -interface Props { - displayTemperature: string - handleCancelClick: () => unknown - handleContinueClick: () => unknown -} - -export const AutoAddPauseUntilHeaterShakerTempStepModal = ( - props: Props -): JSX.Element => { - const { t } = useTranslation('modal') - return ( - -
- {t('auto_add_pause_until_temp_step.heater_shaker_title', { - temperature: props.displayTemperature, - })} -
-

- {t('auto_add_pause_until_temp_step.body1', { - temperature: props.displayTemperature, - })} -

-

- {t('auto_add_pause_until_temp_step.heater_shaker_pause_later', { - temperature: props.displayTemperature, - })} -

-
- - {t('auto_add_pause_until_temp_step.later_button')} - - - {t('auto_add_pause_until_temp_step.now_button')} - -
-
- ) -} diff --git a/protocol-designer/src/components/modals/AutoAddPauseUntilTempStepModal.tsx b/protocol-designer/src/components/modals/AutoAddPauseUntilTempStepModal.tsx index 399e4c76d05..f15af16d347 100644 --- a/protocol-designer/src/components/modals/AutoAddPauseUntilTempStepModal.tsx +++ b/protocol-designer/src/components/modals/AutoAddPauseUntilTempStepModal.tsx @@ -1,55 +1,165 @@ import { useTranslation } from 'react-i18next' +import { useSelector } from 'react-redux' import { AlertModal, - OutlineButton, + ALIGN_FLEX_END, + COLORS, DeprecatedPrimaryButton, + DIRECTION_COLUMN, + Flex, + Icon, + Modal, + OutlineButton, + PrimaryButton, + SecondaryButton, + SPACING, + StyledText, } from '@opentrons/components' +import { TEMPERATURE_MODULE_TYPE } from '@opentrons/shared-data' + +import { getEnableRedesign } from '../../feature-flags/selectors' import modalStyles from './modal.module.css' import styles from './AutoAddPauseUntilTempStepModal.module.css' +import type { ModuleType } from '@opentrons/shared-data' + interface Props { displayTemperature: string - handleCancelClick: () => unknown - handleContinueClick: () => unknown + handleCancelClick: () => void + handleContinueClick: () => void + moduleType: ModuleType + displayModule?: string } export const AutoAddPauseUntilTempStepModal = (props: Props): JSX.Element => { + const { + displayTemperature, + handleCancelClick, + handleContinueClick, + moduleType, + displayModule, + } = props const { t } = useTranslation('modal') - return ( - -
- {t('auto_add_pause_until_temp_step.title', { - temperature: props.displayTemperature, - })} -
-

- {t('auto_add_pause_until_temp_step.body1', { - temperature: props.displayTemperature, - })} -

-

- {t('auto_add_pause_until_temp_step.body2', { - temperature: props.displayTemperature, + const enableRedesign = useSelector(getEnableRedesign) + if (enableRedesign) { + return ( + -

- - {t('auto_add_pause_until_temp_step.later_button')} - - - {t('auto_add_pause_until_temp_step.now_button')} - -
-
- ) + titleElement1={ + + } + childrenPadding={SPACING.spacing24} + footer={ + + + + {t('auto_add_pause_until_temp_step.redesign.build_pause_later')} + + + + + {t('auto_add_pause_until_temp_step.redesign.pause_protocol')} + + + + } + > + + + {t('auto_add_pause_until_temp_step.redesign.body1', { + module: displayModule, + temp: displayTemperature, + })} + + + {t('auto_add_pause_until_temp_step.redesign.body2', { + module: displayModule, + temp: displayTemperature, + })} + + + + ) + } else { + return moduleType === TEMPERATURE_MODULE_TYPE ? ( + +
+ {t('auto_add_pause_until_temp_step.title', { + temperature: displayTemperature, + })} +
+

+ {t('auto_add_pause_until_temp_step.body1', { + temperature: displayTemperature, + })} +

+

+ {t('auto_add_pause_until_temp_step.body2', { + temperature: displayTemperature, + })} +

+
+ + {t('auto_add_pause_until_temp_step.later_button')} + + + {t('auto_add_pause_until_temp_step.now_button')} + +
+
+ ) : ( + +
+ {t('auto_add_pause_until_temp_step.heater_shaker_title', { + temperature: displayTemperature, + })} +
+

+ {t('auto_add_pause_until_temp_step.body1', { + temperature: displayTemperature, + })} +

+

+ {t('auto_add_pause_until_temp_step.heater_shaker_pause_later', { + temperature: displayTemperature, + })} +

+
+ + {t('auto_add_pause_until_temp_step.later_button')} + + + {t('auto_add_pause_until_temp_step.now_button')} + +
+
+ ) + } } diff --git a/protocol-designer/src/components/modals/ConfirmDeleteModal.tsx b/protocol-designer/src/components/modals/ConfirmDeleteModal.tsx index ad614e5ba64..98800e21219 100644 --- a/protocol-designer/src/components/modals/ConfirmDeleteModal.tsx +++ b/protocol-designer/src/components/modals/ConfirmDeleteModal.tsx @@ -1,8 +1,22 @@ import type * as React from 'react' import { createPortal } from 'react-dom' import { useTranslation } from 'react-i18next' -import { AlertModal } from '@opentrons/components' +import { useSelector } from 'react-redux' +import { + ALIGN_FLEX_END, + AlertModal, + AlertPrimaryButton, + COLORS, + Flex, + Icon, + Modal, + SPACING, + SecondaryButton, + StyledText, +} from '@opentrons/components' +import { getEnableRedesign } from '../../feature-flags/selectors' import { getMainPagePortalEl } from '../portals/MainPageModalPortal' +import { getTopPortalEl } from '../portals/TopPortal' import modalStyles from './modal.module.css' export const DELETE_PROFILE_CYCLE: 'deleteProfileCycle' = 'deleteProfileCycle' @@ -31,12 +45,12 @@ interface Props { } export function ConfirmDeleteModal(props: Props): JSX.Element { - const { t } = useTranslation(['modal', 'button']) + const { i18n, t } = useTranslation(['modal', 'button']) const { modalType, onCancelClick, onContinueClick } = props - const cancelCopy = t('button:cancel') - const continueCopy = t( - `confirm_delete_modal.${modalType}.confirm_button`, - t('button:continue') + const cancelCopy = i18n.format(t('button:cancel'), 'capitalize') + const continueCopy = i18n.format( + t(`confirm_delete_modal.${modalType}.confirm_button`, t('button:continue')), + 'capitalize' ) const buttons = [ { title: cancelCopy, children: cancelCopy, onClick: onCancelClick }, @@ -47,17 +61,50 @@ export function ConfirmDeleteModal(props: Props): JSX.Element { onClick: onContinueClick, }, ] - return createPortal( - -

{t(`confirm_delete_modal.${modalType}.body`)}

-
, - getMainPagePortalEl() - ) + const enableRedesign = useSelector(getEnableRedesign) + return enableRedesign + ? createPortal( + + } + footer={ + + + + {cancelCopy} + + + + + {continueCopy} + + + + } + > + + {t(`confirm_delete_modal.${modalType}.body`)} + + , + getTopPortalEl() + ) + : createPortal( + +

{t(`confirm_delete_modal.${modalType}.body`)}

+
, + getMainPagePortalEl() + ) } diff --git a/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx b/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx index 2f63d6667bb..5409217dfd3 100644 --- a/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx +++ b/protocol-designer/src/components/modals/CreateFileWizard/ModulesAndOtherTile.tsx @@ -37,7 +37,6 @@ import gripperImage from '../../../assets/images/flex_gripper.png' import wasteChuteImage from '../../../assets/images/waste_chute.png' import trashBinImage from '../../../assets/images/flex_trash_bin.png' import { uuid } from '../../../utils' -import { getEnableMoam } from '../../../feature-flags/selectors' import { selectors as featureFlagSelectors } from '../../../feature-flags' import { CrashInfoBox, ModuleDiagram } from '../../modules' import { ModuleFields } from '../FilePipettesModal/ModuleFields' @@ -202,15 +201,17 @@ export function ModulesAndOtherTile(props: WizardTileProps): JSX.Element { function FlexModuleFields(props: WizardTileProps): JSX.Element { const { watch, setValue } = props - const enableMoam = useSelector(getEnableMoam) const modules = watch('modules') const additionalEquipment = watch('additionalEquipment') const enableAbsorbanceReader = useSelector( featureFlagSelectors.getEnableAbsorbanceReader ) - const MOAM_MODULE_TYPES: ModuleType[] = enableMoam - ? [TEMPERATURE_MODULE_TYPE, HEATERSHAKER_MODULE_TYPE, MAGNETIC_BLOCK_TYPE] - : [TEMPERATURE_MODULE_TYPE] + const MOAM_MODULE_TYPES: ModuleType[] = [ + TEMPERATURE_MODULE_TYPE, + HEATERSHAKER_MODULE_TYPE, + MAGNETIC_BLOCK_TYPE, + ] + const moduleTypesOnDeck = modules != null ? Object.values(modules).map(module => module.type) : [] diff --git a/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx b/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx index fdc4e9b86e5..0b4e99952a6 100644 --- a/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx +++ b/protocol-designer/src/components/modals/CreateFileWizard/__tests__/ModulesAndOtherTile.test.tsx @@ -5,10 +5,7 @@ import { fireEvent, screen, cleanup } from '@testing-library/react' import { FLEX_ROBOT_TYPE, OT2_ROBOT_TYPE } from '@opentrons/shared-data' import { renderWithProviders } from '../../../../__testing-utils__' import { i18n } from '../../../../assets/localization' -import { - getDisableModuleRestrictions, - getEnableMoam, -} from '../../../../feature-flags/selectors' +import { getDisableModuleRestrictions } from '../../../../feature-flags/selectors' import { CrashInfoBox } from '../../../modules' import { ModuleFields } from '../../FilePipettesModal/ModuleFields' import { ModulesAndOtherTile } from '../ModulesAndOtherTile' @@ -61,7 +58,6 @@ describe('ModulesAndOtherTile', () => { ...props, ...mockWizardTileProps, } as WizardTileProps - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(CrashInfoBox).mockReturnValue(
<div>mock CrashInfoBox</div>
) vi.mocked(EquipmentOption).mockReturnValue(
<div>mock EquipmentOption</div>
) vi.mocked(getDisableModuleRestrictions).mockReturnValue(false) diff --git a/protocol-designer/src/components/modals/__tests__/AutoAddPauseUntilHeaterShakerTempStepModal.test.tsx b/protocol-designer/src/components/modals/__tests__/AutoAddPauseUntilHeaterShakerTempStepModal.test.tsx deleted file mode 100644 index 41b2becffbc..00000000000 --- a/protocol-designer/src/components/modals/__tests__/AutoAddPauseUntilHeaterShakerTempStepModal.test.tsx +++ /dev/null @@ -1,49 +0,0 @@ -import type * as React from 'react' -import { describe, it, expect, vi, beforeEach } from 'vitest' -import { fireEvent, screen } from '@testing-library/react' -import { renderWithProviders } from '../../../__testing-utils__' -import { i18n } from '../../../assets/localization' -import { AutoAddPauseUntilHeaterShakerTempStepModal } from '../AutoAddPauseUntilHeaterShakerTempStepModal' - -const render = ( - props: React.ComponentProps -) => { - return renderWithProviders( - , - { - i18nInstance: i18n, - } - )[0] -} - -describe('AutoAddPauseUntilHeaterShakerTempStepModal ', () => { - let props: React.ComponentProps< - typeof AutoAddPauseUntilHeaterShakerTempStepModal - > - beforeEach(() => { - props = { - displayTemperature: '10', - handleCancelClick: vi.fn(), - handleContinueClick: vi.fn(), - } - }) - - it('should render the correct text with 10 C temp and buttons are clickable', () => { - render(props) - screen.getByText('Pause protocol until Heater-Shaker module is at 10°C?') - screen.getByText( - 'Pause protocol now to wait until module reaches 10°C before continuing on to the next step.' - ) - screen.getByText( - 'Build a pause later if you want your protocol to proceed to the next steps while the Heater-Shaker module goes to 10°C' - ) - const cancelBtn = screen.getByRole('button', { - name: 'I will build a pause later', - }) - const contBtn = screen.getByRole('button', { name: 'Pause protocol now' }) - fireEvent.click(cancelBtn) - expect(props.handleCancelClick).toHaveBeenCalled() - fireEvent.click(contBtn) - expect(props.handleContinueClick).toHaveBeenCalled() - }) -}) diff --git a/protocol-designer/src/components/modals/__tests__/AutoAddPauseUntilTempStepModal.test.tsx b/protocol-designer/src/components/modals/__tests__/AutoAddPauseUntilTempStepModal.test.tsx index aa8d4601996..8fb3d84c1c0 100644 --- a/protocol-designer/src/components/modals/__tests__/AutoAddPauseUntilTempStepModal.test.tsx +++ b/protocol-designer/src/components/modals/__tests__/AutoAddPauseUntilTempStepModal.test.tsx @@ -4,6 +4,9 @@ import { fireEvent, screen } from '@testing-library/react' import { renderWithProviders } from '../../../__testing-utils__' import { i18n } from '../../../assets/localization' import { AutoAddPauseUntilTempStepModal } from '../AutoAddPauseUntilTempStepModal' +import { TEMPERATURE_MODULE_TYPE } from '@opentrons/shared-data' + +vi.mock('../../../feature-flags/selectors') const render = ( props: React.ComponentProps @@ -20,6 +23,7 @@ describe('AutoAddPauseUntilTempStepModal ', () => { displayTemperature: '10', handleCancelClick: vi.fn(), handleContinueClick: vi.fn(), + moduleType: TEMPERATURE_MODULE_TYPE, } }) it('should render the correct text with 10 C temp and buttons are clickable', () => { diff --git a/protocol-designer/src/feature-flags/reducers.ts b/protocol-designer/src/feature-flags/reducers.ts index bcb586acf9a..1f9999be001 100644 --- a/protocol-designer/src/feature-flags/reducers.ts +++ b/protocol-designer/src/feature-flags/reducers.ts @@ -26,7 +26,6 @@ const initialFlags: Flags = { 
OT_PD_ENABLE_ABSORBANCE_READER: process.env.OT_PD_ENABLE_ABSORBANCE_READER === '1' || false, OT_PD_ENABLE_REDESIGN: process.env.OT_PD_ENABLE_REDESIGN === '1' || false, - OT_PD_ENABLE_MOAM: process.env.OT_PD_ENABLE_MOAM === '1' || false, OT_PD_ENABLE_COMMENT: process.env.OT_PD_ENABLE_COMMENT === '1' || false, OT_PD_ENABLE_RETURN_TIP: process.env.OT_PD_ENABLE_RETURN_TIP === '1' || false, OT_PD_ENABLE_HOT_KEYS_DISPLAY: diff --git a/protocol-designer/src/feature-flags/selectors.ts b/protocol-designer/src/feature-flags/selectors.ts index a4c3baf05be..896eb5db254 100644 --- a/protocol-designer/src/feature-flags/selectors.ts +++ b/protocol-designer/src/feature-flags/selectors.ts @@ -33,10 +33,6 @@ export const getEnableRedesign: Selector = createSelector( getFeatureFlagData, flags => flags.OT_PD_ENABLE_REDESIGN ?? false ) -export const getEnableMoam: Selector = createSelector( - getFeatureFlagData, - flags => flags.OT_PD_ENABLE_MOAM ?? false -) export const getEnableComment: Selector = createSelector( getFeatureFlagData, flags => flags.OT_PD_ENABLE_COMMENT ?? false diff --git a/protocol-designer/src/feature-flags/types.ts b/protocol-designer/src/feature-flags/types.ts index f37d77bc814..774d2c5fa5e 100644 --- a/protocol-designer/src/feature-flags/types.ts +++ b/protocol-designer/src/feature-flags/types.ts @@ -31,7 +31,6 @@ export type FlagTypes = | 'OT_PD_ALLOW_ALL_TIPRACKS' | 'OT_PD_ENABLE_ABSORBANCE_READER' | 'OT_PD_ENABLE_REDESIGN' - | 'OT_PD_ENABLE_MOAM' | 'OT_PD_ENABLE_COMMENT' | 'OT_PD_ENABLE_RETURN_TIP' | 'OT_PD_ENABLE_HOT_KEYS_DISPLAY' @@ -46,7 +45,6 @@ export const allFlags: FlagTypes[] = [ 'PRERELEASE_MODE', 'OT_PD_ENABLE_ABSORBANCE_READER', 'OT_PD_ENABLE_REDESIGN', - 'OT_PD_ENABLE_MOAM', 'OT_PD_ENABLE_COMMENT', 'OT_PD_ENABLE_RETURN_TIP', ] diff --git a/protocol-designer/src/organisms/DefineLiquidsModal/index.tsx b/protocol-designer/src/organisms/DefineLiquidsModal/index.tsx index b2e0c76bac1..e9943676c35 100644 --- a/protocol-designer/src/organisms/DefineLiquidsModal/index.tsx +++ b/protocol-designer/src/organisms/DefineLiquidsModal/index.tsx @@ -33,6 +33,7 @@ import * as labwareIngredActions from '../../labware-ingred/actions' import { selectors as labwareIngredSelectors } from '../../labware-ingred/selectors' import { swatchColors } from '../../components/swatchColors' import { checkColor } from './utils' +import { HandleEnter } from '../../atoms/HandleEnter' import type { ColorResult, RGBColor } from 'react-color' import type { ThunkDispatch } from 'redux-thunk' @@ -47,7 +48,10 @@ interface LiquidEditFormValues { [key: string]: unknown } -const INVALID_DISPLAY_COLORS = ['#000000', '#ffffff', DEPRECATED_WHALE_GREY] +const BLACK = '#000000' +const WHITE = '#ffffff' + +const INVALID_DISPLAY_COLORS = [BLACK, WHITE, DEPRECATED_WHALE_GREY] const liquidEditFormSchema: any = Yup.object().shape({ name: Yup.string().required('liquid name is required'), @@ -92,8 +96,7 @@ export function DefineLiquidsModal( const allIngredientGroupFields = useSelector( labwareIngredSelectors.allIngredientGroupFields ) - const liquidGroupId = - selectedLiquidGroupState && selectedLiquidGroupState.liquidGroupId + const liquidGroupId = selectedLiquidGroupState.liquidGroupId const deleteLiquidGroup = (): void => { if (liquidGroupId != null) { dispatch(labwareIngredActions.deleteLiquidGroup(liquidGroupId)) @@ -110,7 +113,7 @@ export function DefineLiquidsModal( dispatch( labwareIngredActions.editLiquidGroup({ ...formData, - liquidGroupId: liquidGroupId, + liquidGroupId, }) ) onClose() @@ -159,105 +162,116 @@ export 
function DefineLiquidsModal( } return ( - - - - {initialValues.name} - - - ) : ( - t('define_liquid') - ) - } - type="info" - onClose={onClose} + { + void handleSubmit(handleLiquidEdits)() + }} > -
- <> - {showColorPicker ? ( - - ( - { - const hex = rgbaToHex(color.rgb) - setValue('displayColor', hex) - field.onChange(hex) - }} - /> - )} - /> + + + + {initialValues.name} + - ) : null} - - - + ) : ( + t('define_liquid') + ) + } + type="info" + onClose={onClose} + > + { + void handleSubmit(handleLiquidEdits)() + }} + > + <> + {showColorPicker ? ( - - {t('name')} - ( - { + const hex = rgbaToHex(color.rgb) + setValue('displayColor', hex) + field.onChange(hex) + }} /> )} /> - - - {t('description')} - - - - - - {t('display_color')} - + ) : null} - { - setShowColorPicker(prev => !prev) - }} - color={color} - size="medium" - /> - - {/* NOTE: this is for serialization if we decide to add it back */} - {/* + + + + {t('name')} + + ( + + )} + /> + + + + {t('description')} + + + + + + {t('display_color')} + + + { + setShowColorPicker(prev => !prev) + }} + color={color} + size="medium" + /> + + {/* NOTE: this is for serialization if we decide to add it back */} + {/* ( @@ -271,44 +285,45 @@ export function DefineLiquidsModal( /> )} /> */} - - - {selectedIngredFields != null ? ( - - - {t('delete_liquid')} - - - ) : ( - - {t('shared:close')} - - )} - + - {t('shared:save')} - + {selectedIngredFields != null ? ( + + + {t('delete_liquid')} + + + ) : ( + + {t('shared:close')} + + )} + + {t('shared:save')} + + - - -
-
+ + + + ) } diff --git a/protocol-designer/src/organisms/EditInstrumentsModal/index.tsx b/protocol-designer/src/organisms/EditInstrumentsModal/index.tsx index 90ff62c327d..3d131e6305c 100644 --- a/protocol-designer/src/organisms/EditInstrumentsModal/index.tsx +++ b/protocol-designer/src/organisms/EditInstrumentsModal/index.tsx @@ -69,6 +69,7 @@ import { selectors as stepFormSelectors } from '../../step-forms' import { BUTTON_LINK_STYLE } from '../../atoms' import { getSectionsFromPipetteName, getShouldShowPipetteType } from './utils' import { editPipettes } from './editPipettes' +import { HandleEnter } from '../../atoms/HandleEnter' import type { PipetteMount, PipetteName } from '@opentrons/shared-data' import type { @@ -151,422 +152,435 @@ export function EditInstrumentsModal( // if a user removes all pipettes, left mount is the first target. const targetPipetteMount = leftPipette == null ? 'left' : 'right' + const handleOnSave = (): void => { + if (page === 'overview') { + onClose() + } else { + setPage('overview') + editPipettes( + labware, + pipettes, + orderedStepIds, + dispatch, + mount, + selectedPip as PipetteName, + selectedTips, + leftPipette, + rightPipette + ) + } + } + return createPortal( - { - resetFields() - onClose() - }} - footer={ - - { - if (page === 'overview') { - onClose() - } else { - setPage('overview') - resetFields() - } - }} + + { + resetFields() + onClose() + }} + footer={ + - {page === 'overview' ? t('shared:cancel') : t('shared:back')} - - { - if (page === 'overview') { - onClose() - } else { - setPage('overview') - editPipettes( - labware, - pipettes, - orderedStepIds, - dispatch, - mount, - selectedPip as PipetteName, - selectedTips, - leftPipette, - rightPipette - ) + { + if (page === 'overview') { + onClose() + } else { + setPage('overview') + resetFields() + } + }} + > + {page === 'overview' ? t('shared:cancel') : t('shared:back')} + + - {t('shared:save')} - - - } - > - {page === 'overview' ? ( - - - - - {t('your_pipettes')} - - {has96Channel || - (leftPipette == null && rightPipette == null) ? null : ( - - dispatch( - changeSavedStepForm({ - stepId: INITIAL_DECK_SETUP_STEP_ID, - update: { - pipetteLocationUpdate: swapPipetteUpdate, - }, - }) - ) - } - > - - - - {t('swap')} - - - - )} - - - {leftPipette?.tiprackDefURI != null && leftInfo != null ? ( - { - setPage('add') - setMount('left') - setPipetteType(leftInfo.type) - setPipetteGen(leftInfo.gen) - setPipetteVolume(leftInfo.volume) - setSelectedTips(leftPipette.tiprackDefURI as string[]) - }} - cleanForm={() => { - dispatch(deletePipettes([leftPipette.id as string])) - previousLeftPipetteTipracks.forEach(tip => - dispatch(deleteContainer({ labwareId: tip.id })) - ) - }} - /> - ) : null} - {rightPipette?.tiprackDefURI != null && rightInfo != null ? ( - { - setPage('add') - setMount('right') - setPipetteType(rightInfo.type) - setPipetteGen(rightInfo.gen) - setPipetteVolume(rightInfo.volume) - setSelectedTips(rightPipette.tiprackDefURI as string[]) - }} - cleanForm={() => { - dispatch(deletePipettes([rightPipette.id as string])) - previousRightPipetteTipracks.forEach(tip => - dispatch(deleteContainer({ labwareId: tip.id })) - ) - }} - /> - ) : null} - {has96Channel || - (leftPipette != null && rightPipette != null) ? null : ( - { - setPage('add') - setMount(targetPipetteMount) - }} - text={t('add_pipette')} - textAlignment="left" - iconName="plus" - /> - )} - + {t('shared:save')} + - {robotType === FLEX_ROBOT_TYPE ? ( + } + > + {page === 'overview' ? 
( + - {t('protocol_overview:your_gripper')} + {t('your_pipettes')} + {has96Channel || + (leftPipette == null && rightPipette == null) ? null : ( + + dispatch( + changeSavedStepForm({ + stepId: INITIAL_DECK_SETUP_STEP_ID, + update: { + pipetteLocationUpdate: swapPipetteUpdate, + }, + }) + ) + } + > + + + + {t('swap')} + + + + )} - {gripper != null ? ( - - - - - {t('protocol_overview:extension')} - - - {t('gripper')} - - - { - dispatch(toggleIsGripperRequired()) - }} - > - - {t('remove')} - - - - - ) : ( + {leftPipette?.tiprackDefURI != null && leftInfo != null ? ( + { + setPage('add') + setMount('left') + setPipetteType(leftInfo.type) + setPipetteGen(leftInfo.gen) + setPipetteVolume(leftInfo.volume) + setSelectedTips(leftPipette.tiprackDefURI as string[]) + }} + cleanForm={() => { + dispatch(deletePipettes([leftPipette.id as string])) + previousLeftPipetteTipracks.forEach(tip => + dispatch(deleteContainer({ labwareId: tip.id })) + ) + }} + /> + ) : null} + {rightPipette?.tiprackDefURI != null && rightInfo != null ? ( + { + setPage('add') + setMount('right') + setPipetteType(rightInfo.type) + setPipetteGen(rightInfo.gen) + setPipetteVolume(rightInfo.volume) + setSelectedTips(rightPipette.tiprackDefURI as string[]) + }} + cleanForm={() => { + dispatch(deletePipettes([rightPipette.id as string])) + previousRightPipetteTipracks.forEach(tip => + dispatch(deleteContainer({ labwareId: tip.id })) + ) + }} + /> + ) : null} + {has96Channel || + (leftPipette != null && rightPipette != null) ? null : ( { - dispatch(toggleIsGripperRequired()) + setPage('add') + setMount(targetPipetteMount) }} - text={t('protocol_overview:add_gripper')} + text={t('add_pipette')} textAlignment="left" iconName="plus" /> )} - ) : null} - - ) : ( - - - - {t('pipette_type')} - - - {PIPETTE_TYPES[robotType].map(type => { - return getShouldShowPipetteType( - type.value as PipetteType, - has96Channel, - leftPipette, - rightPipette, - mount - ) ? ( - { - setPipetteType(type.value) - setPipetteGen('flex') - setPipetteVolume(null) - setSelectedTips([]) - }} - buttonLabel={t(`shared:${type.label}`)} - buttonValue="single" - isSelected={pipetteType === type.value} - /> - ) : null - })} - + {robotType === FLEX_ROBOT_TYPE ? ( + + + + {t('protocol_overview:your_gripper')} + + + + {gripper != null ? ( + + + + + {t('protocol_overview:extension')} + + + {t('gripper')} + + + { + dispatch(toggleIsGripperRequired()) + }} + > + + {t('remove')} + + + + + ) : ( + { + dispatch(toggleIsGripperRequired()) + }} + text={t('protocol_overview:add_gripper')} + textAlignment="left" + iconName="plus" + /> + )} + + + ) : null} - {pipetteType != null && robotType === OT2_ROBOT_TYPE ? ( + ) : ( + - {t('pipette_gen')} + {t('pipette_type')} - {PIPETTE_GENS.map(gen => ( - { - setPipetteGen(gen) - setPipetteVolume(null) - setSelectedTips([]) - }} - buttonLabel={gen} - buttonValue={gen} - isSelected={pipetteGen === gen} - /> - ))} + {PIPETTE_TYPES[robotType].map(type => { + return getShouldShowPipetteType( + type.value as PipetteType, + has96Channel, + leftPipette, + rightPipette, + mount + ) ? ( + { + setPipetteType(type.value) + setPipetteGen('flex') + setPipetteVolume(null) + setSelectedTips([]) + }} + buttonLabel={t(`shared:${type.label}`)} + buttonValue="single" + isSelected={pipetteType === type.value} + /> + ) : null + })} - ) : null} - {(pipetteType != null && robotType === FLEX_ROBOT_TYPE) || - (pipetteGen !== 'flex' && - pipetteType != null && - robotType === OT2_ROBOT_TYPE) ? 
( - - - {t('pipette_vol')} - - - {PIPETTE_VOLUMES[robotType]?.map(volume => { - if (robotType === FLEX_ROBOT_TYPE && pipetteType != null) { - const flexVolume = volume as PipetteInfoByType - const flexPipetteInfo = flexVolume[pipetteType] - - return flexPipetteInfo?.map(type => ( - { - setPipetteVolume(type.value) - setSelectedTips([]) - }} - buttonLabel={t('vol_label', { volume: type.label })} - buttonValue={type.value} - isSelected={pipetteVolume === type.value} - /> - )) - } else { - const ot2Volume = volume as PipetteInfoByGen - const gen = pipetteGen as Gen + {pipetteType != null && robotType === OT2_ROBOT_TYPE ? ( + + + {t('pipette_gen')} + + + {PIPETTE_GENS.map(gen => ( + { + setPipetteGen(gen) + setPipetteVolume(null) + setSelectedTips([]) + }} + buttonLabel={gen} + buttonValue={gen} + isSelected={pipetteGen === gen} + /> + ))} + + + ) : null} + {(pipetteType != null && robotType === FLEX_ROBOT_TYPE) || + (pipetteGen !== 'flex' && + pipetteType != null && + robotType === OT2_ROBOT_TYPE) ? ( + + + {t('pipette_vol')} + + + {PIPETTE_VOLUMES[robotType]?.map(volume => { + if (robotType === FLEX_ROBOT_TYPE && pipetteType != null) { + const flexVolume = volume as PipetteInfoByType + const flexPipetteInfo = flexVolume[pipetteType] - return ot2Volume[gen].map(info => { - return info[pipetteType]?.map(type => ( + return flexPipetteInfo?.map(type => ( { setPipetteVolume(type.value) + setSelectedTips([]) }} - buttonLabel={t('vol_label', { - volume: type.label, - })} + buttonLabel={t('vol_label', { volume: type.label })} buttonValue={type.value} isSelected={pipetteVolume === type.value} /> )) - }) - } - })} + } else { + const ot2Volume = volume as PipetteInfoByGen + const gen = pipetteGen as Gen + + return ot2Volume[gen].map(info => { + return info[pipetteType]?.map(type => ( + { + setPipetteVolume(type.value) + }} + buttonLabel={t('vol_label', { + volume: type.label, + })} + buttonValue={type.value} + isSelected={pipetteVolume === type.value} + /> + )) + }) + } + })} + - - ) : null} - {allPipetteOptions.includes(selectedPip as PipetteName) - ? (() => { - const tiprackOptions = getTiprackOptions({ - allLabware, - allowAllTipracks, - selectedPipetteName: selectedPip, - }) - return ( - - - {t('pipette_tips')} - - { + const tiprackOptions = getTiprackOptions({ + allLabware, + allowAllTipracks, + selectedPipetteName: selectedPip, + }) + return ( + - {tiprackOptions.map(option => ( - { - const updatedTips = selectedTips.includes( - option.value - ) - ? selectedTips.filter(v => v !== option.value) - : [...selectedTips, option.value] - setSelectedTips(updatedTips) - }} - /> - ))} - + {t('pipette_tips')} + + - - - {t('add_custom_tips')} - - dispatch(createCustomTiprackDef(e))} - /> - - {pipetteVolume === 'p1000' && - robotType === FLEX_ROBOT_TYPE ? null : ( - ( + { - dispatch( - setFeatureFlags({ - OT_PD_ALLOW_ALL_TIPRACKS: !allowAllTipracks, - }) + const updatedTips = selectedTips.includes( + option.value ) + ? selectedTips.filter(v => v !== option.value) + : [...selectedTips, option.value] + setSelectedTips(updatedTips) }} - textDecoration={TYPOGRAPHY.textDecorationUnderline} - > + /> + ))} + + - {allowAllTipracks - ? t('show_default_tips') - : t('show_all_tips')} + {t('add_custom_tips')} - - )} - - - - ) - })() - : null} - - )} - , + + dispatch(createCustomTiprackDef(e)) + } + /> + + {pipetteVolume === 'p1000' && + robotType === FLEX_ROBOT_TYPE ? 
null : ( + { + dispatch( + setFeatureFlags({ + OT_PD_ALLOW_ALL_TIPRACKS: !allowAllTipracks, + }) + ) + }} + textDecoration={ + TYPOGRAPHY.textDecorationUnderline + } + > + + {allowAllTipracks + ? t('show_default_tips') + : t('show_all_tips')} + + + )} + + + + ) + })() + : null} + + )} + + , getTopPortalEl() ) } diff --git a/protocol-designer/src/organisms/EditNickNameModal/index.tsx b/protocol-designer/src/organisms/EditNickNameModal/index.tsx index bfead26d352..4b8934ff6c8 100644 --- a/protocol-designer/src/organisms/EditNickNameModal/index.tsx +++ b/protocol-designer/src/organisms/EditNickNameModal/index.tsx @@ -17,6 +17,7 @@ import { import { selectors as uiLabwareSelectors } from '../../ui/labware' import { getTopPortalEl } from '../../components/portals/TopPortal' import { renameLabware } from '../../labware-ingred/actions' +import { HandleEnter } from '../../atoms/HandleEnter' import type { ThunkDispatch } from '../../types' const MAX_NICK_NAME_LENGTH = 115 @@ -37,57 +38,59 @@ export function EditNickNameModal(props: EditNickNameModalProps): JSX.Element { } return createPortal( - + + { + onClose() + }} + > + {t('shared:cancel')} + + = MAX_NICK_NAME_LENGTH} + > + {t('shared:save')} + + + } + > - { - onClose() + + + {t('labware_name')} + + + = MAX_NICK_NAME_LENGTH ? t('rename_error') : null + } + data-testid="renameLabware_inputField" + name="renameLabware" + onChange={e => { + setNickName(e.target.value) }} - > - {t('shared:cancel')} - - = MAX_NICK_NAME_LENGTH} - > - {t('shared:save')} - - - } - > - - - - {t('labware_name')} - + value={nickName} + type="text" + autoFocus + /> - = MAX_NICK_NAME_LENGTH ? t('rename_error') : null - } - data-testid="renameLabware_inputField" - name="renameLabware" - onChange={e => { - setNickName(e.target.value) - }} - value={nickName} - type="text" - autoFocus - /> - - , + + , getTopPortalEl() ) } diff --git a/protocol-designer/src/organisms/IncompatibleTipsModal/index.tsx b/protocol-designer/src/organisms/IncompatibleTipsModal/index.tsx index b509e47ffb0..57dee3964ad 100644 --- a/protocol-designer/src/organisms/IncompatibleTipsModal/index.tsx +++ b/protocol-designer/src/organisms/IncompatibleTipsModal/index.tsx @@ -10,6 +10,8 @@ import { StyledText, } from '@opentrons/components' import { setFeatureFlags } from '../../feature-flags/actions' +import { HandleEnter } from '../../atoms/HandleEnter' + import type { ThunkDispatch } from 'redux-thunk' import type { BaseState } from '../../types' @@ -23,37 +25,41 @@ export function IncompatibleTipsModal( const dispatch = useDispatch>() const { t } = useTranslation(['create_new_protocol', 'shared']) + const handleShowAllTips = (): void => { + onClose() + dispatch( + setFeatureFlags({ + OT_PD_ALLOW_ALL_TIPRACKS: true, + }) + ) + } + return ( - - { - onClose() - dispatch( - setFeatureFlags({ - OT_PD_ALLOW_ALL_TIPRACKS: true, - }) - ) - }} + + - {t('show_tips')} - - {t('shared:cancel')} - - } - > - - {t('incompatible_tip_body')} - - + + {t('show_tips')} + + + {t('shared:cancel')} + + + } + > + + {t('incompatible_tip_body')} + + + ) } diff --git a/protocol-designer/src/organisms/MaterialsListModal/index.tsx b/protocol-designer/src/organisms/MaterialsListModal/index.tsx index 71ca8b5fb2c..0d5c90c1f4b 100644 --- a/protocol-designer/src/organisms/MaterialsListModal/index.tsx +++ b/protocol-designer/src/organisms/MaterialsListModal/index.tsx @@ -31,6 +31,7 @@ import { getRobotType } from '../../file-data/selectors' import { getInitialDeckSetup } from '../../step-forms/selectors' import { getTopPortalEl } from 
'../../components/portals/TopPortal' import { selectors as labwareIngredSelectors } from '../../labware-ingred/selectors' +import { HandleEnter } from '../../atoms/HandleEnter' import type { AdditionalEquipmentName } from '@opentrons/step-generation' import type { LabwareOnDeck, ModuleOnDeck } from '../../step-forms' @@ -69,212 +70,218 @@ export function MaterialsListModal({ ) const tCSlot = robotType === FLEX_ROBOT_TYPE ? 'A1, B1' : '7,8,10,11' + const handleClose = (): void => { + setShowMaterialsListModal(false) + } + return createPortal( - { - setShowMaterialsListModal(false) - }} - closeOnOutsideClick - title={t('materials_list')} - marginLeft="0rem" - minWidth={MODAL_MIN_WIDTH} - > - - - - {t('deck_hardware')} - - - {fixtures.length > 0 - ? fixtures.map(fixture => ( - - - ) : ( - '' - ) - } - content={ - - - {t(`shared:${fixture.name}`)} - - - } - /> - - )) - : null} - {hardware.length > 0 ? ( - hardware.map((hw, id) => { - const formatLocation = (slot: string): string => { - if (hw.type === THERMOCYCLER_MODULE_TYPE) { - return tCSlot + + + + + + {t('deck_hardware')} + + + {fixtures.length > 0 + ? fixtures.map(fixture => ( + + + ) : ( + '' + ) + } + content={ + + + {t(`shared:${fixture.name}`)} + + + } + /> + + )) + : null} + {hardware.length > 0 ? ( + hardware.map((hw, id) => { + const formatLocation = (slot: string): string => { + if (hw.type === THERMOCYCLER_MODULE_TYPE) { + return tCSlot + } + return slot.replace('cutout', '') } - return slot.replace('cutout', '') - } - return ( - - - } - content={ - - - - {getModuleDisplayName(hw.model)} - - - } - /> - - ) - }) - ) : ( - - )} + return ( + + + } + content={ + + + + {getModuleDisplayName(hw.model)} + + + } + /> + + ) + }) + ) : ( + + )} + - - - - {t('labware')} - - - {labware.length > 0 ? ( - labware.map(lw => { - const labwareOnModuleEntity = Object.values(modulesOnDeck).find( - mod => mod.id === lw.slot - ) - const labwareOnLabwareEntity = Object.values( - labwareOnDeck - ).find(labware => labware.id === lw.slot) - const labwareOnLabwareOnModuleSlot = Object.values( - modulesOnDeck - ).find(mod => mod.id === labwareOnLabwareEntity?.slot)?.slot - const labwareOnLabwareOnSlot = labwareOnLabwareEntity?.slot + + + {t('labware')} + + + {labware.length > 0 ? ( + labware.map(lw => { + const labwareOnModuleEntity = Object.values( + modulesOnDeck + ).find(mod => mod.id === lw.slot) + const labwareOnLabwareEntity = Object.values( + labwareOnDeck + ).find(labware => labware.id === lw.slot) + const labwareOnLabwareOnModuleSlot = Object.values( + modulesOnDeck + ).find(mod => mod.id === labwareOnLabwareEntity?.slot)?.slot + const labwareOnLabwareOnSlot = labwareOnLabwareEntity?.slot - let deckLabelSlot = lw.slot - if (labwareOnModuleEntity != null) { - deckLabelSlot = - labwareOnModuleEntity.type === THERMOCYCLER_MODULE_TYPE - ? tCSlot - : labwareOnModuleEntity.slot - } else if (labwareOnLabwareOnModuleSlot != null) { - deckLabelSlot = labwareOnLabwareOnModuleSlot - } else if (labwareOnLabwareOnSlot != null) { - deckLabelSlot = labwareOnLabwareOnSlot - } else if (deckLabelSlot === 'offDeck') { - deckLabelSlot = 'Off-deck' - } - return ( - - } - content={lw.def.metadata.displayName} - /> - - ) - }) - ) : ( - - )} + let deckLabelSlot = lw.slot + if (labwareOnModuleEntity != null) { + deckLabelSlot = + labwareOnModuleEntity.type === THERMOCYCLER_MODULE_TYPE + ? 
tCSlot + : labwareOnModuleEntity.slot + } else if (labwareOnLabwareOnModuleSlot != null) { + deckLabelSlot = labwareOnLabwareOnModuleSlot + } else if (labwareOnLabwareOnSlot != null) { + deckLabelSlot = labwareOnLabwareOnSlot + } else if (deckLabelSlot === 'offDeck') { + deckLabelSlot = 'Off-deck' + } + return ( + + + } + content={lw.def.metadata.displayName} + /> + + ) + }) + ) : ( + + )} + - - - - {t('liquids')} - - - {liquids.length > 0 ? ( - - - {t('name')} - - - {t('total_well_volume')} - - - ) : null} - + + + {t('liquids')} + + {liquids.length > 0 ? ( - liquids.map((liquid, id) => { - const volumePerWell = Object.values( - allLabwareWellContents - ).flatMap(labwareWithIngred => - Object.values(labwareWithIngred).map( - ingred => ingred[liquid.ingredientId]?.volume ?? 0 + + + {t('name')} + + + {t('total_well_volume')} + + + ) : null} + + {liquids.length > 0 ? ( + liquids.map((liquid, id) => { + const volumePerWell = Object.values( + allLabwareWellContents + ).flatMap(labwareWithIngred => + Object.values(labwareWithIngred).map( + ingred => ingred[liquid.ingredientId]?.volume ?? 0 + ) ) - ) - const totalVolume = sum(volumePerWell) + const totalVolume = sum(volumePerWell) - if (totalVolume === 0) { - return null - } else { - return ( - - + if (totalVolume === 0) { + return null + } else { + return ( + - - - {liquid.name ?? t('n/a')} - - + + + + {liquid.name ?? t('n/a')} + + - - + + + - - - ) - } - }) - ) : ( - - )} + + ) + } + }) + ) : ( + + )} + - - , + + , getTopPortalEl() ) } diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/AddMetadata.tsx b/protocol-designer/src/pages/CreateNewProtocolWizard/AddMetadata.tsx index a76f7bb4dcb..74f3b3d4690 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/AddMetadata.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/AddMetadata.tsx @@ -11,7 +11,7 @@ import { } from '@opentrons/components' import { InputField } from '../../components/modals/CreateFileWizard/InputField' import { WizardBody } from './WizardBody' -import { HandleEnter } from './HandleEnter' +import { HandleEnter } from '../../atoms/HandleEnter' import type { WizardTileProps } from './types' diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectFixtures.tsx b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectFixtures.tsx index caebeecbe99..e21122653ed 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectFixtures.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectFixtures.tsx @@ -22,7 +22,7 @@ import { getNumOptions, getNumSlotsAvailable, } from './utils' -import { HandleEnter } from './HandleEnter' +import { HandleEnter } from '../../atoms/HandleEnter' import type { DropdownBorder } from '@opentrons/components' import type { AdditionalEquipment, WizardTileProps } from './types' diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectGripper.tsx b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectGripper.tsx index 88dc6ab031d..611887ef6f9 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectGripper.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectGripper.tsx @@ -11,7 +11,7 @@ import { DIRECTION_COLUMN, } from '@opentrons/components' import { WizardBody } from './WizardBody' -import { HandleEnter } from './HandleEnter' +import { HandleEnter } from '../../atoms/HandleEnter' import type { WizardTileProps } from './types' diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx 
b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx index 4cf2576c25e..b3bf5e225fa 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectModules.tsx @@ -25,10 +25,7 @@ import { TEMPERATURE_MODULE_TYPE, } from '@opentrons/shared-data' import { uuid } from '../../utils' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../feature-flags/selectors' import { useKitchen } from '../../organisms/Kitchen/hooks' import { ModuleDiagram } from '../../components/modules' import { WizardBody } from './WizardBody' @@ -39,7 +36,7 @@ import { OT2_SUPPORTED_MODULE_MODELS, } from './constants' import { getNumOptions, getNumSlotsAvailable } from './utils' -import { HandleEnter } from './HandleEnter' +import { HandleEnter } from '../../atoms/HandleEnter' import type { DropdownBorder } from '@opentrons/components' import type { ModuleModel, ModuleType } from '@opentrons/shared-data' @@ -56,7 +53,6 @@ export function SelectModules(props: WizardTileProps): JSX.Element | null { const fields = watch('fields') const modules = watch('modules') const additionalEquipment = watch('additionalEquipment') - const enableMoam = useSelector(getEnableMoam) const enableAbsorbanceReader = useSelector(getEnableAbsorbanceReader) const robotType = fields.robotType const supportedModules = @@ -83,9 +79,11 @@ export function SelectModules(props: WizardTileProps): JSX.Element | null { ) ) ) - const MOAM_MODULE_TYPES: ModuleType[] = enableMoam - ? [TEMPERATURE_MODULE_TYPE, HEATERSHAKER_MODULE_TYPE, MAGNETIC_BLOCK_TYPE] - : [TEMPERATURE_MODULE_TYPE] + const MOAM_MODULE_TYPES: ModuleType[] = [ + TEMPERATURE_MODULE_TYPE, + HEATERSHAKER_MODULE_TYPE, + MAGNETIC_BLOCK_TYPE, + ] const handleAddModule = (moduleModel: ModuleModel): void => { if (hasNoAvailableSlots) { diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectPipettes.tsx b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectPipettes.tsx index f15a716c617..115a8a46f9d 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectPipettes.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectPipettes.tsx @@ -41,7 +41,7 @@ import { BUTTON_LINK_STYLE } from '../../atoms' import { WizardBody } from './WizardBody' import { PIPETTE_GENS, PIPETTE_TYPES, PIPETTE_VOLUMES } from './constants' import { getTiprackOptions } from './utils' -import { HandleEnter } from './HandleEnter' +import { HandleEnter } from '../../atoms/HandleEnter' import { removeOpentronsPhrases } from '../../utils' import type { ThunkDispatch } from 'redux-thunk' diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectRobot.tsx b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectRobot.tsx index 3d2894bd4a6..b6ab28893e6 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/SelectRobot.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/SelectRobot.tsx @@ -8,7 +8,7 @@ import { } from '@opentrons/components' import { FLEX_ROBOT_TYPE, OT2_ROBOT_TYPE } from '@opentrons/shared-data' import { WizardBody } from './WizardBody' -import { HandleEnter } from './HandleEnter' +import { HandleEnter } from '../../atoms/HandleEnter' import type { WizardTileProps } from './types' export function SelectRobot(props: WizardTileProps): JSX.Element { diff --git a/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx 
b/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx index bddd3174b7f..a18f06bf508 100644 --- a/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx +++ b/protocol-designer/src/pages/CreateNewProtocolWizard/__tests__/SelectModules.test.tsx @@ -4,10 +4,7 @@ import '@testing-library/jest-dom/vitest' import { FLEX_ROBOT_TYPE, OT2_ROBOT_TYPE } from '@opentrons/shared-data' import { fireEvent, screen } from '@testing-library/react' import { i18n } from '../../../assets/localization' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../../feature-flags/selectors' import { renderWithProviders } from '../../../__testing-utils__' import { SelectModules } from '../SelectModules' import type { WizardFormState, WizardTileProps } from '../types' @@ -46,7 +43,6 @@ describe('SelectModules', () => { props = { ...mockWizardTileProps, } as WizardTileProps - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(getEnableAbsorbanceReader).mockReturnValue(true) }) diff --git a/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx b/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx index 20975e76c88..e062fa4784d 100644 --- a/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx +++ b/protocol-designer/src/pages/Designer/DeckSetup/DeckSetupTools.tsx @@ -37,14 +37,11 @@ import { selectNestedLabware, selectZoomedIntoSlot, } from '../../../labware-ingred/actions' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../../feature-flags/selectors' import { selectors } from '../../../labware-ingred/selectors' import { useKitchen } from '../../../organisms/Kitchen/hooks' import { createContainerAboveModule } from '../../../step-forms/actions/thunks' -import { FIXTURES, MOAM_MODELS, MOAM_MODELS_WITH_FF } from './constants' +import { FIXTURES, MOAM_MODELS } from './constants' import { getSlotInformation } from '../utils' import { getModuleModelsBySlot, getDeckErrors } from './utils' import { LabwareTools } from './LabwareTools' @@ -70,7 +67,6 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { const robotType = useSelector(getRobotType) const dispatch = useDispatch>() const enableAbsorbanceReader = useSelector(getEnableAbsorbanceReader) - const enableMoam = useSelector(getEnableMoam) const deckSetup = useSelector(getDeckSetupForActiveItem) const { selectedLabwareDefUri, @@ -161,6 +157,8 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { } const handleClear = (): void => { + onDeckProps?.setHoveredModule(null) + onDeckProps?.setHoveredFixture(null) if (slot !== 'offDeck') { // clear module from slot if (createdModuleForSlot != null) { @@ -291,9 +289,7 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { module.type === getModuleType(model) && module.slot !== slot ) - const moamModels = enableMoam - ? 
MOAM_MODELS - : MOAM_MODELS_WITH_FF + const moamModels = MOAM_MODELS const collisionError = getDeckErrors({ modules: deckSetupModules, @@ -312,6 +308,7 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { }} setHovered={() => { if (onDeckProps?.setHoveredModule != null) { + onDeckProps.setHoveredFixture(null) onDeckProps.setHoveredModule(model) } }} @@ -390,6 +387,7 @@ export function DeckSetupTools(props: DeckSetupToolsProps): JSX.Element | null { }} setHovered={() => { if (onDeckProps?.setHoveredFixture != null) { + onDeckProps.setHoveredModule(null) onDeckProps.setHoveredFixture(fixture) } }} diff --git a/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx b/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx index a3d63343389..cb85cf12693 100644 --- a/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx +++ b/protocol-designer/src/pages/Designer/DeckSetup/__tests__/DeckSetupTools.test.tsx @@ -12,10 +12,7 @@ import { renderWithProviders } from '../../../../__testing-utils__' import { deleteContainer } from '../../../../labware-ingred/actions' import { createModule, deleteModule } from '../../../../step-forms/actions' import { getRobotType } from '../../../../file-data/selectors' -import { - getEnableAbsorbanceReader, - getEnableMoam, -} from '../../../../feature-flags/selectors' +import { getEnableAbsorbanceReader } from '../../../../feature-flags/selectors' import { createDeckFixture, deleteDeckFixture, @@ -63,7 +60,6 @@ describe('DeckSetupTools', () => { vi.mocked(LabwareTools).mockReturnValue(
<div>mock labware tools</div>
) vi.mocked(getRobotType).mockReturnValue(FLEX_ROBOT_TYPE) vi.mocked(getEnableAbsorbanceReader).mockReturnValue(true) - vi.mocked(getEnableMoam).mockReturnValue(true) vi.mocked(getDeckSetupForActiveItem).mockReturnValue({ labware: {}, modules: {}, diff --git a/protocol-designer/src/pages/Designer/ProtocolSteps/StepForm/index.tsx b/protocol-designer/src/pages/Designer/ProtocolSteps/StepForm/index.tsx index 26d1daa324e..0272a35e618 100644 --- a/protocol-designer/src/pages/Designer/ProtocolSteps/StepForm/index.tsx +++ b/protocol-designer/src/pages/Designer/ProtocolSteps/StepForm/index.tsx @@ -2,6 +2,12 @@ import { useState } from 'react' import { useTranslation } from 'react-i18next' import { connect } from 'react-redux' import { useConditionalConfirm } from '@opentrons/components' +import { + HEATERSHAKER_MODULE_TYPE, + TEMPERATURE_MODULE_TYPE, + getModuleDisplayName, +} from '@opentrons/shared-data' + import { actions } from '../../../../steplist' import { actions as stepsActions } from '../../../../ui/steps' import { @@ -17,7 +23,6 @@ import { DELETE_STEP_FORM, } from '../../../../components/modals/ConfirmDeleteModal' import { AutoAddPauseUntilTempStepModal } from '../../../../components/modals/AutoAddPauseUntilTempStepModal' -import { AutoAddPauseUntilHeaterShakerTempStepModal } from '../../../../components/modals/AutoAddPauseUntilHeaterShakerTempStepModal' import { getDirtyFields, makeSingleEditFieldProps } from './utils' import { StepFormToolbox } from './StepFormToolbox' @@ -159,20 +164,30 @@ function StepFormManager(props: StepFormManagerProps): JSX.Element | null { onContinueClick={confirmClose} /> )} - {showAddPauseUntilTempStepModal && ( + {showAddPauseUntilTempStepModal || + showAddPauseUntilHeaterShakerTempStepModal ? ( - )} - {showAddPauseUntilHeaterShakerTempStepModal && ( - - )} + ) : null} (false) const isStartingOrEndingState = title === STARTING_DECK_STATE || title === FINAL_DECK_STATE + const dispatch = useDispatch>() + const multiSelectItemIds = useSelector(getMultiSelectItemIds) let backgroundColor = isStartingOrEndingState ? 
COLORS.blue20 : COLORS.grey20 let color = COLORS.black90 @@ -78,6 +96,17 @@ export function StepContainer(props: StepContainerProps): JSX.Element { color = COLORS.white } + const handleClick = (event: MouseEvent): void => { + const wasOutside = !( + event.target instanceof Node && + menuRootRef.current?.contains(event.target) + ) + + if (wasOutside && stepOverflowMenu) { + setStepOverflowMenu(false) + } + } + const handleOverflowClick = (event: React.MouseEvent): void => { const { clientY } = event @@ -100,19 +129,66 @@ export function StepContainer(props: StepContainerProps): JSX.Element { } }) - const handleClick = (event: MouseEvent): void => { - const wasOutside = !( - event.target instanceof Node && - menuRootRef.current?.contains(event.target) - ) + const handleStepItemSelection = (): void => { + if (stepId != null) { + dispatch(populateForm(stepId)) + } + setStepOverflowMenu(false) + } - if (wasOutside && stepOverflowMenu) { - setStepOverflowMenu(false) + const onDeleteClickAction = (): void => { + if (multiSelectItemIds) { + dispatch(steplistActions.deleteMultipleSteps(multiSelectItemIds)) + dispatch(deselectAllSteps('EXIT_BATCH_EDIT_MODE_BUTTON_PRESS')) + } else { + console.warn( + 'something went wrong, you cannot delete multiple steps if none are selected' + ) + } + } + + const { + confirm: confirmMultiDelete, + showConfirmation: showMultiDeleteConfirmation, + cancel: cancelMultiDelete, + } = useConditionalConfirm(onDeleteClickAction, true) + + const deleteStep = (stepId: StepIdType): void => { + dispatch(steplistActions.deleteStep(stepId)) + } + + const handleDelete = (): void => { + if (stepId != null) { + deleteStep(stepId) + } else { + console.warn( + 'something went wrong, cannot delete a step without a step id' + ) } } + const { + confirm: confirmDelete, + showConfirmation: showDeleteConfirmation, + cancel: cancelDelete, + } = useConditionalConfirm(handleDelete, true) + return ( <> + {showDeleteConfirmation && ( + + )} + {showMultiDeleteConfirmation && ( + + )} , getTopPortalEl() ) diff --git a/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/StepOverflowMenu.tsx b/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/StepOverflowMenu.tsx index 5078ff4c0e5..b83198d2d81 100644 --- a/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/StepOverflowMenu.tsx +++ b/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/StepOverflowMenu.tsx @@ -12,175 +12,68 @@ import { NO_WRAP, POSITION_ABSOLUTE, SPACING, - useConditionalConfirm, } from '@opentrons/components' -import { actions as steplistActions } from '../../../../steplist' -import { - getMultiSelectItemIds, - actions as stepsActions, -} from '../../../../ui/steps' -import { - CLOSE_BATCH_EDIT_FORM, - CLOSE_STEP_FORM_WITH_CHANGES, - CLOSE_UNSAVED_STEP_FORM, - ConfirmDeleteModal, - DELETE_MULTIPLE_STEP_FORMS, - DELETE_STEP_FORM, -} from '../../../../components/modals/ConfirmDeleteModal' +import { actions as stepsActions } from '../../../../ui/steps' import { hoverOnStep, toggleViewSubstep, - populateForm, - deselectAllSteps, } from '../../../../ui/steps/actions/actions' import { - getBatchEditFormHasUnsavedChanges, - getCurrentFormHasUnsavedChanges, - getCurrentFormIsPresaved, getSavedStepForms, getUnsavedForm, } from '../../../../step-forms/selectors' -import { deleteMultipleSteps } from '../../../../steplist/actions' -import { duplicateMultipleSteps } from '../../../../ui/steps/actions/thunks' -import type * as React from 'react' import type { ThunkDispatch } from 'redux-thunk' import type { 
BaseState } from '../../../../types' import type { StepIdType } from '../../../../form-types' -import type { DeleteModalType } from '../../../../components/modals/ConfirmDeleteModal' interface StepOverflowMenuProps { stepId: string menuRootRef: React.MutableRefObject top: number setStepOverflowMenu: React.Dispatch> + handleEdit: () => void + confirmDelete: () => void + confirmMultiDelete: () => void + multiSelectItemIds: string[] | null } export function StepOverflowMenu(props: StepOverflowMenuProps): JSX.Element { - const { stepId, menuRootRef, top, setStepOverflowMenu } = props + const { + stepId, + menuRootRef, + top, + setStepOverflowMenu, + handleEdit, + confirmDelete, + confirmMultiDelete, + multiSelectItemIds, + } = props const { t } = useTranslation('protocol_steps') - const multiSelectItemIds = useSelector(getMultiSelectItemIds) const dispatch = useDispatch>() - const deleteStep = (stepId: StepIdType): void => { - dispatch(steplistActions.deleteStep(stepId)) - } const formData = useSelector(getUnsavedForm) const savedStepFormData = useSelector(getSavedStepForms)[stepId] - const currentFormIsPresaved = useSelector(getCurrentFormIsPresaved) - const singleEditFormHasUnsavedChanges = useSelector( - getCurrentFormHasUnsavedChanges - ) - const batchEditFormHasUnsavedChanges = useSelector( - getBatchEditFormHasUnsavedChanges - ) + const isPipetteStep = + savedStepFormData.stepType === 'moveLiquid' || + savedStepFormData.stepType === 'mix' + const isThermocyclerStep = savedStepFormData.stepType === 'thermocycler' + const duplicateStep = ( stepId: StepIdType ): ReturnType => dispatch(stepsActions.duplicateStep(stepId)) - const handleStepItemSelection = (): void => { - dispatch(populateForm(stepId)) - setStepOverflowMenu(false) - } - const handleDelete = (): void => { - if (stepId != null) { - deleteStep(stepId) - } else { - console.warn( - 'something went wrong, cannot delete a step without a step id' - ) - } - } - const onDuplicateClickAction = (): void => { + const duplicateMultipleSteps = (): void => { if (multiSelectItemIds) { - dispatch(duplicateMultipleSteps(multiSelectItemIds)) + dispatch(stepsActions.duplicateMultipleSteps(multiSelectItemIds)) } else { console.warn( 'something went wrong, you cannot duplicate multiple steps if none are selected' ) } } - const onDeleteClickAction = (): void => { - if (multiSelectItemIds) { - dispatch(deleteMultipleSteps(multiSelectItemIds)) - dispatch(deselectAllSteps('EXIT_BATCH_EDIT_MODE_BUTTON_PRESS')) - } else { - console.warn( - 'something went wrong, you cannot delete multiple steps if none are selected' - ) - } - } - - const { confirm, showConfirmation, cancel } = useConditionalConfirm( - handleStepItemSelection, - currentFormIsPresaved || singleEditFormHasUnsavedChanges - ) - const { - confirm: confirmDuplicate, - showConfirmation: showDuplicateConfirmation, - cancel: cancelDuplicate, - } = useConditionalConfirm( - onDuplicateClickAction, - batchEditFormHasUnsavedChanges - ) - - const { - confirm: confirmMultiDelete, - showConfirmation: showMultiDeleteConfirmation, - cancel: cancelMultiDelete, - } = useConditionalConfirm(onDeleteClickAction, true) - - const { - confirm: confirmDelete, - showConfirmation: showDeleteConfirmation, - cancel: cancelDelete, - } = useConditionalConfirm(handleDelete, true) - - const getModalType = (): DeleteModalType => { - if (currentFormIsPresaved) { - return CLOSE_UNSAVED_STEP_FORM - } else { - return CLOSE_STEP_FORM_WITH_CHANGES - } - } - const isPipetteStep = - savedStepFormData.stepType === 'moveLiquid' || - 
savedStepFormData.stepType === 'mix' - const isThermocyclerStep = savedStepFormData.stepType === 'thermocycler' return ( <> - {/* TODO: update this modal */} - {showConfirmation && ( - - )} - {/* TODO: update this modal */} - {showDuplicateConfirmation && ( - - )} - {/* TODO: update this modal */} - {showMultiDeleteConfirmation && ( - - )} - {/* TODO: update this modal */} - {showDeleteConfirmation && ( - - )} {multiSelectItemIds != null && multiSelectItemIds.length > 0 ? ( <> - + { + duplicateMultipleSteps() + setStepOverflowMenu(false) + }} + > {t('duplicate_steps')} - + + { + confirmMultiDelete() + setStepOverflowMenu(false) + }} + > {t('delete_steps')} ) : ( <> {formData != null ? null : ( - {t('edit_step')} + {t('edit_step')} )} {isPipetteStep || isThermocyclerStep ? ( { duplicateStep(stepId) + setStepOverflowMenu(false) }} > {t('duplicate')} - {t('delete')} + { + confirmDelete() + setStepOverflowMenu(false) + }} + > + {t('delete')} + )} diff --git a/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepContainer.test.tsx b/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepContainer.test.tsx index 93ea0baab62..0f5981906d6 100644 --- a/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepContainer.test.tsx +++ b/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepContainer.test.tsx @@ -9,6 +9,8 @@ import { StepContainer } from '../StepContainer' import { StepOverflowMenu } from '../StepOverflowMenu' vi.mock('../../../../../step-forms/selectors') +vi.mock('../../../../../ui/steps/actions/actions') +vi.mock('../../../../../ui/steps/selectors') vi.mock('../StepOverflowMenu') const render = (props: React.ComponentProps) => { diff --git a/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepOverflowMenu.test.tsx b/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepOverflowMenu.test.tsx index f37d2114c74..597771e0854 100644 --- a/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepOverflowMenu.test.tsx +++ b/protocol-designer/src/pages/Designer/ProtocolSteps/Timeline/__tests__/StepOverflowMenu.test.tsx @@ -29,6 +29,8 @@ vi.mock('../../../../../step-forms/selectors') vi.mock('../../../../../ui/steps/actions/actions') vi.mock('../../../../../ui/steps/actions/thunks') vi.mock('../../../../../steplist/actions') +vi.mock('../../../../../feature-flags/selectors') + vi.mock('@opentrons/components', async importOriginal => { const actual = await importOriginal() return { @@ -56,6 +58,10 @@ describe('StepOverflowMenu', () => { top: 0, menuRootRef: { current: null }, setStepOverflowMenu: vi.fn(), + multiSelectItemIds: [], + handleEdit: vi.fn(), + confirmDelete: mockConfirm, + confirmMultiDelete: vi.fn(), } vi.mocked(getMultiSelectItemIds).mockReturnValue(null) vi.mocked(getCurrentFormIsPresaved).mockReturnValue(false) @@ -71,24 +77,19 @@ describe('StepOverflowMenu', () => { it('renders each button and clicking them calls the action', () => { render(props) - fireEvent.click(screen.getAllByText('Delete step')[0]) - screen.getByText('Are you sure you want to delete this step?') - fireEvent.click(screen.getByText('delete step')) + fireEvent.click(screen.getByText('Delete step')) expect(mockConfirm).toHaveBeenCalled() fireEvent.click(screen.getByText('Duplicate step')) expect(vi.mocked(stepsActions.duplicateStep)).toHaveBeenCalled() fireEvent.click(screen.getByText('Edit step')) - expect(mockConfirm).toHaveBeenCalled() 
fireEvent.click(screen.getByText('View details')) expect(vi.mocked(hoverOnStep)).toHaveBeenCalled() expect(vi.mocked(toggleViewSubstep)).toHaveBeenCalled() }) it('renders the multi select overflow menu', () => { - vi.mocked(getMultiSelectItemIds).mockReturnValue(['1', '2']) - render(props) + render({ ...props, multiSelectItemIds: ['abc', '123'] }) screen.getByText('Duplicate steps') screen.getByText('Delete steps') - screen.getByText('Delete multiple steps') }) }) diff --git a/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx b/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx index dda66237feb..36caf29c4ad 100644 --- a/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx +++ b/protocol-designer/src/pages/ProtocolOverview/DeckThumbnailDetails.tsx @@ -10,6 +10,7 @@ import { inferModuleOrientationFromXCoordinate, isAddressableAreaStandardSlot, THERMOCYCLER_MODULE_TYPE, + SPAN7_8_10_11_SLOT, } from '@opentrons/shared-data' import { LabwareOnDeck } from '../../components/DeckSetup/LabwareOnDeck' import { getStagingAreaAddressableAreas } from '../../utils' @@ -65,22 +66,23 @@ export const DeckThumbnailDetails = ( return ( <> {/* all modules */} - {allModules.map(moduleOnDeck => { - const slotId = moduleOnDeck.slot + {allModules.map(({ id, slot, model, type, moduleState }) => { + const slotId = + slot === SPAN7_8_10_11_SLOT && type === THERMOCYCLER_MODULE_TYPE + ? '7' + : slot const slotPosition = getPositionFromSlotId(slotId, deckDef) if (slotPosition == null) { - console.warn(`no slot ${slotId} for module ${moduleOnDeck.id}`) + console.warn(`no slot ${slotId} for module ${id}`) return null } - const moduleDef = getModuleDef2(moduleOnDeck.model) - const labwareLoadedOnModule = allLabware.find( - lw => lw.slot === moduleOnDeck.id - ) + const moduleDef = getModuleDef2(model) + const labwareLoadedOnModule = allLabware.find(lw => lw.slot === id) return ( - + -

-          {t('alert:export_warnings.no_commands.body1')}
-          {t('alert:export_warnings.no_commands.body2')}

- + + {t('alert:export_warnings.no_commands.redesign.body')} + + ), + titleElement: ( + ), - heading: t('alert:export_warnings.no_commands.heading'), } } diff --git a/protocol-designer/src/pages/ProtocolOverview/__tests__/ProtocolOverview.test.tsx b/protocol-designer/src/pages/ProtocolOverview/__tests__/ProtocolOverview.test.tsx index e8536e4a549..50d9d48e7bd 100644 --- a/protocol-designer/src/pages/ProtocolOverview/__tests__/ProtocolOverview.test.tsx +++ b/protocol-designer/src/pages/ProtocolOverview/__tests__/ProtocolOverview.test.tsx @@ -28,8 +28,9 @@ vi.mock('../../../step-forms/selectors') vi.mock('../../../file-data/selectors') vi.mock('../../../organisms/MaterialsListModal') vi.mock('../../../labware-ingred/selectors') +vi.mock('../../../load-file/actions') +vi.mock('../../../feature-flags/selectors') vi.mock('../../../organisms') -vi.mock('../../../labware-ingred/selectors') vi.mock('../ProtocolMetadata') vi.mock('../LiquidDefinitions') vi.mock('../InstrumentsInfo') diff --git a/protocol-designer/src/pages/ProtocolOverview/index.tsx b/protocol-designer/src/pages/ProtocolOverview/index.tsx index 0b402ed3884..7f9969575c5 100644 --- a/protocol-designer/src/pages/ProtocolOverview/index.tsx +++ b/protocol-designer/src/pages/ProtocolOverview/index.tsx @@ -203,16 +203,16 @@ export function ProtocolOverview(): JSX.Element { fixtureWithoutStep.wasteChute || fixtureWithoutStep.stagingAreaSlots.length > 0 - const warning = - hasWarning && - getWarningContent({ - noCommands, - pipettesWithoutStep, - modulesWithoutStep, - gripperWithoutStep, - fixtureWithoutStep, - t, - }) + const warning = hasWarning + ? getWarningContent({ + noCommands, + pipettesWithoutStep, + modulesWithoutStep, + gripperWithoutStep, + fixtureWithoutStep, + t, + }) + : null const cancelModal = (): void => { setShowExportWarningModal(false) @@ -239,11 +239,13 @@ export function ProtocolOverview(): JSX.Element { {t('shared:cancel')} diff --git a/robot-server/robot_server/runs/router/base_router.py b/robot-server/robot_server/runs/router/base_router.py index 639e6d91628..788ca44aa1c 100644 --- a/robot-server/robot_server/runs/router/base_router.py +++ b/robot-server/robot_server/runs/router/base_router.py @@ -132,9 +132,9 @@ class AllRunsLinks(BaseModel): class CurrentStateLinks(BaseModel): """Links returned with the current state of a run.""" - current: Optional[CommandLinkNoMeta] = Field( + lastCompleted: Optional[CommandLinkNoMeta] = Field( None, - description="Path to the current command when current state was reported, if any.", + description="Path to the last completed command when current state was reported, if any.", ) @@ -564,7 +564,7 @@ async def get_run_commands_error( """ ), responses={ - status.HTTP_200_OK: {"model": SimpleBody[RunCurrentState]}, + status.HTTP_200_OK: {"model": Body[RunCurrentState, CurrentStateLinks]}, status.HTTP_409_CONFLICT: {"model": ErrorBody[RunStopped]}, }, ) @@ -590,17 +590,18 @@ async def get_current_state( for pipetteId, nozzle_map in active_nozzle_maps.items() } - current_command = run_data_manager.get_current_command(run_id=runId) + last_completed_command = run_data_manager.get_last_completed_command( + run_id=runId + ) except RunNotCurrentError as e: raise RunStopped(detail=str(e)).as_error(status.HTTP_409_CONFLICT) - # TODO(jh, 03-11-24): Use `last_completed_command` instead of `current_command` to avoid concurrency gotchas. 
links = CurrentStateLinks.construct( - current=CommandLinkNoMeta.construct( - id=current_command.command_id, - href=f"/runs/{runId}/commands/{current_command.command_id}", + lastCompleted=CommandLinkNoMeta.construct( + id=last_completed_command.command_id, + href=f"/runs/{runId}/commands/{last_completed_command.command_id}", ) - if current_command is not None + if last_completed_command is not None else None ) diff --git a/robot-server/robot_server/runs/run_data_manager.py b/robot-server/robot_server/runs/run_data_manager.py index 3edf89ef163..d30f5c33979 100644 --- a/robot-server/robot_server/runs/run_data_manager.py +++ b/robot-server/robot_server/runs/run_data_manager.py @@ -456,10 +456,20 @@ def get_current_command(self, run_id: str) -> Optional[CommandPointer]: if self._run_orchestrator_store.current_run_id == run_id: return self._run_orchestrator_store.get_current_command() else: - # todo(mm, 2024-05-20): - # For historical runs to behave consistently with the current run, - # this should be the most recently completed command, not `None`. - return None + return self._get_historical_run_last_command(run_id=run_id) + + def get_last_completed_command(self, run_id: str) -> Optional[CommandPointer]: + """Get the "last" command, if any. + + See `ProtocolEngine.state_view.commands.get_most_recently_finalized_command()` for the definition of "last." + + Args: + run_id: ID of the run. + """ + if self._run_orchestrator_store.current_run_id == run_id: + return self._run_orchestrator_store.get_most_recently_finalized_command() + else: + return self._get_historical_run_last_command(run_id=run_id) def get_recovery_target_command(self, run_id: str) -> Optional[CommandPointer]: """Get the current error recovery target. @@ -554,3 +564,22 @@ def _get_run_time_parameters(self, run_id: str) -> List[RunTimeParameter]: return self._run_orchestrator_store.get_run_time_parameters() else: return self._run_store.get_run_time_parameters(run_id=run_id) + + def _get_historical_run_last_command(self, run_id: str) -> Optional[CommandPointer]: + command_slice = self._run_store.get_commands_slice( + run_id=run_id, cursor=None, length=1, include_fixit_commands=True + ) + if not command_slice.commands: + return None + command = command_slice.commands[-1] + + return ( + CommandPointer( + command_id=command.id, + command_key=command.key, + created_at=command.createdAt, + index=command_slice.cursor, + ) + if command + else None + ) diff --git a/robot-server/robot_server/runs/run_orchestrator_store.py b/robot-server/robot_server/runs/run_orchestrator_store.py index e05bd3bd349..efa97347ae9 100644 --- a/robot-server/robot_server/runs/run_orchestrator_store.py +++ b/robot-server/robot_server/runs/run_orchestrator_store.py @@ -335,9 +335,13 @@ def get_run_time_parameters(self) -> List[RunTimeParameter]: return self.run_orchestrator.get_run_time_parameters() def get_current_command(self) -> Optional[CommandPointer]: - """Get the current running command.""" + """Get the current running command, if any.""" return self.run_orchestrator.get_current_command() + def get_most_recently_finalized_command(self) -> Optional[CommandPointer]: + """Get the most recently finalized command, if any.""" + return self.run_orchestrator.get_most_recently_finalized_command() + def get_command_slice( self, cursor: Optional[int], length: int, include_fixit_commands: bool ) -> CommandSlice: diff --git a/robot-server/tests/runs/router/test_base_router.py b/robot-server/tests/runs/router/test_base_router.py index 25c91f70ade..894950343e4 100644 --- 
a/robot-server/tests/runs/router/test_base_router.py +++ b/robot-server/tests/runs/router/test_base_router.py @@ -876,10 +876,12 @@ async def test_get_current_state_success( decoy.when(mock_run_data_manager.get_nozzle_maps(run_id=run_id)).then_return( mock_nozzle_maps ) - decoy.when(mock_run_data_manager.get_current_command(run_id=run_id)).then_return( + decoy.when( + mock_run_data_manager.get_last_completed_command(run_id=run_id) + ).then_return( CommandPointer( - command_id="current-command-id", - command_key="current-command-key", + command_id="last-command-id", + command_key="last-command-key", created_at=datetime(year=2024, month=4, day=4), index=101, ) @@ -901,9 +903,9 @@ async def test_get_current_state_success( } ) assert result.content.links == CurrentStateLinks( - current=CommandLinkNoMeta( - href="/runs/test-run-id/commands/current-command-id", - id="current-command-id", + lastCompleted=CommandLinkNoMeta( + href="/runs/test-run-id/commands/last-command-id", + id="last-command-id", ) ) diff --git a/robot-server/tests/runs/test_run_data_manager.py b/robot-server/tests/runs/test_run_data_manager.py index 869f1c1c643..5e4aed1f3e2 100644 --- a/robot-server/tests/runs/test_run_data_manager.py +++ b/robot-server/tests/runs/test_run_data_manager.py @@ -1004,12 +1004,105 @@ def test_get_current_command_not_current_run( subject: RunDataManager, mock_run_store: RunStore, mock_run_orchestrator_store: RunOrchestratorStore, + run_command: commands.Command, ) -> None: - """Should return None because the run is not current.""" + """Should get the last command from the run store for a historical run.""" + last_command_slice = commands.WaitForResume( + id="command-id-1", + key="command-key", + createdAt=datetime(year=2021, month=1, day=1), + status=commands.CommandStatus.SUCCEEDED, + params=commands.WaitForResumeParams(message="Hello"), + ) + + expected_last_command = CommandPointer( + command_id="command-id-1", + command_key="command-key", + created_at=datetime(year=2021, month=1, day=1), + index=0, + ) + + command_slice = CommandSlice( + commands=[last_command_slice], cursor=0, total_length=1 + ) + decoy.when(mock_run_orchestrator_store.current_run_id).then_return("not-run-id") + decoy.when( + mock_run_store.get_commands_slice( + run_id="run-id", cursor=None, length=1, include_fixit_commands=True + ) + ).then_return(command_slice) result = subject.get_current_command("run-id") - assert result is None + assert result == expected_last_command + + +def test_get_last_completed_command_current_run( + decoy: Decoy, + subject: RunDataManager, + mock_run_orchestrator_store: RunOrchestratorStore, + run_command: commands.Command, +) -> None: + """Should get the last command from the engine store for the current run.""" + run_id = "current-run-id" + expected_last_command = CommandPointer( + command_id=run_command.id, + command_key=run_command.key, + created_at=run_command.createdAt, + index=1, + ) + + decoy.when(mock_run_orchestrator_store.current_run_id).then_return(run_id) + decoy.when( + mock_run_orchestrator_store.get_most_recently_finalized_command() + ).then_return(expected_last_command) + + result = subject.get_last_completed_command(run_id) + + assert result == expected_last_command + + +def test_get_last_completed_command_not_current_run( + decoy: Decoy, + subject: RunDataManager, + mock_run_orchestrator_store: RunOrchestratorStore, + mock_run_store: RunStore, + run_command: commands.Command, +) -> None: + """Should get the last command from the run store for a historical run.""" + run_id = 
"historical-run-id" + + last_command_slice = commands.WaitForResume( + id="command-id-1", + key="command-key", + createdAt=datetime(year=2021, month=1, day=1), + status=commands.CommandStatus.SUCCEEDED, + params=commands.WaitForResumeParams(message="Hello"), + ) + + expected_last_command = CommandPointer( + command_id="command-id-1", + command_key="command-key", + created_at=datetime(year=2021, month=1, day=1), + index=1, + ) + + decoy.when(mock_run_orchestrator_store.current_run_id).then_return( + "different-run-id" + ) + + command_slice = CommandSlice( + commands=[last_command_slice], cursor=1, total_length=1 + ) + decoy.when( + mock_run_store.get_commands_slice( + run_id=run_id, cursor=None, length=1, include_fixit_commands=True + ) + ).then_return(command_slice) + + result = subject.get_last_completed_command(run_id) + + assert result == expected_last_command def test_get_command_from_engine( diff --git a/update-server/oe_upload.py b/update-server/oe_upload.py index 43d8bf47525..9d70dcaf430 100644 --- a/update-server/oe_upload.py +++ b/update-server/oe_upload.py @@ -26,7 +26,7 @@ async def do_update(update_file: str, host: str, timeout = aiohttp.ClientTimeout(total=7200) async with aiohttp.ClientSession(timeout=timeout) as session: root = host + '/server/update' - filename = "ot3-system.zip" + filename = "system-update.zip" print(f"Starting update of {update_file.name} to {host}") begin_resp = await session.post(root + '/begin') if begin_resp.status == 409: