Skip to content

Commit

Permalink
add script for triggering remote job, reading logs, and opening simularium website
Browse files Browse the repository at this point in the history
  • Loading branch information
ascibisz committed Aug 28, 2024
1 parent 3480a55 commit 7bea731
Show file tree
Hide file tree
Showing 2 changed files with 81 additions and 1 deletion.
79 changes: 79 additions & 0 deletions batch_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
import boto3
import time
import webbrowser
import uuid
import argparse

# AWS clients: Batch for submitting/polling the job, CloudWatch Logs for
# retrieving the job's output after it finishes.
client = boto3.client('batch')
cloudwatch = boto3.client('logs')

# Command-line options: which recipe and packing config the remote job runs.
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("-r", type=str, default="examples/recipes/v2/one_sphere.json", help="recipe")
arg_parser.add_argument("-c", type=str, default="examples/packing-configs/run.json", help="config")
args = arg_parser.parse_args()

# Submit Job to AWS batch using config and recipe from commandline.
# The config/recipe paths are passed to the container via environment
# variables; the command itself is a placeholder echo.
job_environment = [
    {'name': 'config', 'value': args.c},
    {'name': 'recipe', 'value': args.r},
]
container_overrides = {
    'command': ['echo', 'hello world'],
    'environment': job_environment,
    'resourceRequirements': [],
}
response = client.submit_job(
    jobName=str(uuid.uuid4()),  # random unique name per run
    jobQueue='arn:aws:batch:us-west-2:771753870375:job-queue/alli-ruge-job-queue',
    jobDefinition='cellpack-test-job-definition',
    parameters={},
    containerOverrides=container_overrides,
)

if response and response.get('jobId'):
    jobId = response.get('jobId')
    jobStatus = "N/A"
    # Filled in once describe_jobs reports a container; initialized so the
    # post-loop log fetch can tell whether we ever saw a log stream.
    # (Previously this could be referenced while unbound -> NameError.)
    logStreamName = None
    while jobStatus not in ["SUCCEEDED", "FAILED"]:
        # While the job is processing, check job status every 2 seconds and
        # print the status when it changes
        time.sleep(2)
        descriptionResponse = client.describe_jobs(
            jobs=[
                jobId,
            ]
        )
        jobs = descriptionResponse.get('jobs') if descriptionResponse else None
        if jobs:
            newStatus = jobs[0].get("status")
            if newStatus != jobStatus:
                print(newStatus)
                jobStatus = newStatus
            logStreamName = jobs[0].get("container", {}).get("logStreamName")
        else:
            print("something went wrong, leaving!")
            # BUG FIX: was `continue`, which looped forever on a persistently
            # malformed response despite the message saying "leaving".
            break

    if logStreamName:
        # Using logStreamName provided while checking status, get all of the logs from the job run
        cloudwatchResponse = cloudwatch.get_log_events(
            logStreamName=logStreamName,
            logGroupIdentifier="/aws/batch/job"
        )

        # was an f-string with no placeholder
        print("\nLogs from AWS Batch Run:")
        prefix = "View in Simularium:"
        for event in cloudwatchResponse.get("events", []):
            # message may be absent on a malformed event; default to ""
            message = event.get("message", "")
            if message.startswith(prefix):
                # We found the log with the simularium url! Strip the prefix
                # (and any surrounding whitespace) instead of a magic [20:] slice.
                url = message[len(prefix):].strip()
                webbrowser.open_new_tab(url)
            else:
                print(message)
else:
    print("Error: response didn't have jobId")

3 changes: 2 additions & 1 deletion cellpack/autopack/upy/simularium/simularium_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -1403,6 +1403,7 @@ def post_and_open_file(self, file_name, open_results_in_browser=True):
)
if file_name and url:
simulariumHelper.store_metadata(file_name, url, db="firebase")
print(f"View in Simularium: https://simularium.allencell.org/viewer?trajUrl={url}")
if open_results_in_browser:
simulariumHelper.open_in_simularium(url)

Expand All @@ -1411,7 +1412,7 @@ def store_result_file(file_path, storage=None):
if storage == "aws":
handler = DATABASE_IDS.handlers().get(storage)
initialized_handler = handler(
bucket_name="cellpack-results",
bucket_name="cellpack-demo",
sub_folder_name="simularium",
region_name="us-west-2",
)
Expand Down

0 comments on commit 7bea731

Please sign in to comment.