From 80a484405c78d0f9da8e510c6dde1fc23986093f Mon Sep 17 00:00:00 2001
From: Nitin Garg
Date: Thu, 3 Oct 2024 19:18:16 +0000
Subject: [PATCH] address some self-review comments

---
 .../examples/dlio/parse_logs.py               | 45 +------------------
 .../testing_on_gke/examples/fio/parse_logs.py | 45 +------------------
 .../examples/utils/parse_logs_common.py       | 43 +++++++++++++++++-
 3 files changed, 44 insertions(+), 89 deletions(-)

diff --git a/perfmetrics/scripts/testing_on_gke/examples/dlio/parse_logs.py b/perfmetrics/scripts/testing_on_gke/examples/dlio/parse_logs.py
index 4357ed4420..66146090df 100644
--- a/perfmetrics/scripts/testing_on_gke/examples/dlio/parse_logs.py
+++ b/perfmetrics/scripts/testing_on_gke/examples/dlio/parse_logs.py
@@ -28,8 +28,7 @@ sys.path.append("../")
 
 import dlio_workload
 from utils.utils import get_memory, get_cpu, unix_to_timestamp, standard_timestamp, is_mash_installed, get_memory_from_monitoring_api, get_cpu_from_monitoring_api, timestamp_to_epoch
-from utils.parse_logs_common import ensure_directory_exists, download_gcs_objects, parse_arguments, SUPPORTED_SCENARIOS, default_service_account_key_file
-from utils.gsheet import append_data_to_gsheet, url
+from utils.parse_logs_common import ensure_directory_exists, download_gcs_objects, parse_arguments, SUPPORTED_SCENARIOS, default_service_account_key_file, export_to_csv, export_to_gsheet
 
 _LOCAL_LOGS_LOCATION = "../../bin/dlio-logs/logs"
 
@@ -329,49 +328,7 @@ def writeOutput(
     )
     rows.append(new_row)
 
-  def export_to_csv(output_file_path: str, header: str, rows: List):
-    if output_file_path and output_file_path.strip():
-      ensure_directory_exists(os.path.dirname(output_file_path))
-      with open(output_file_path, "a") as output_file_fwr:
-        # Write a new header.
-        output_file_fwr.write(f"{','.join(header)}\n")
-        for row in rows:
-          output_file_fwr.write(f"{','.join([f'{val}' for val in row])}\n")
-        output_file_fwr.close()
-      print(
-          "\nSuccessfully published outputs of FIO test runs to"
-          f" {output_file_path} !!!"
-      )
-
   export_to_csv(output_file_path=args.output_file, header=_HEADER, rows=rows)
-
-  def export_to_gsheet(
-      header: str,
-      rows: List,
-      output_gsheet_id: str,
-      output_worksheet_name: str,
-      output_gsheet_keyfile: str,
-  ):
-    if (
-        output_gsheet_id
-        and output_gsheet_id.strip()
-        and output_worksheet_name
-        and output_worksheet_name.strip()
-    ):
-      append_data_to_gsheet(
-          data={"header": header, "values": rows},
-          worksheet=output_worksheet_name,
-          gsheet_id=output_gsheet_id,
-          serviceAccountKeyFile=output_gsheet_keyfile,
-          # default_service_account_key_file(
-          #     args.project_id
-          # ),
-      )
-      print(
-          "\nSuccessfully published outputs of FIO test runs at worksheet"
-          f" '{args.output_worksheet_name}' in {url(args.output_gsheet_id)}"
-      )
-
   export_to_gsheet(
       output_gsheet_id=args.output_gsheet_id,
       output_worksheet_name=args.output_worksheet_name,
diff --git a/perfmetrics/scripts/testing_on_gke/examples/fio/parse_logs.py b/perfmetrics/scripts/testing_on_gke/examples/fio/parse_logs.py
index a84e433698..1cef365805 100644
--- a/perfmetrics/scripts/testing_on_gke/examples/fio/parse_logs.py
+++ b/perfmetrics/scripts/testing_on_gke/examples/fio/parse_logs.py
@@ -28,8 +28,7 @@ sys.path.append("../")
 
 import fio_workload
 from utils.utils import get_memory, get_cpu, unix_to_timestamp, is_mash_installed, get_memory_from_monitoring_api, get_cpu_from_monitoring_api
-from utils.parse_logs_common import ensure_directory_exists, download_gcs_objects, parse_arguments, SUPPORTED_SCENARIOS, default_service_account_key_file
-from utils.gsheet import append_data_to_gsheet, url
+from utils.parse_logs_common import ensure_directory_exists, download_gcs_objects, parse_arguments, SUPPORTED_SCENARIOS, default_service_account_key_file, export_to_csv, export_to_gsheet
 
 _LOCAL_LOGS_LOCATION = "../../bin/fio-logs"
 
@@ -363,49 +362,7 @@ def writeOutput(
     )
     rows.append(new_row)
 
-  def export_to_csv(output_file_path: str, header: str, rows: List):
-    if output_file_path and output_file_path.strip():
-      ensure_directory_exists(os.path.dirname(output_file_path))
-      with open(output_file_path, "a") as output_file_fwr:
-        # Write a new header.
-        output_file_fwr.write(f"{','.join(header)}\n")
-        for row in rows:
-          output_file_fwr.write(f"{','.join([f'{val}' for val in row])}\n")
-        output_file_fwr.close()
-      print(
-          "\nSuccessfully published outputs of FIO test runs to"
-          f" {output_file_path} !!!"
-      )
-
   export_to_csv(output_file_path=args.output_file, header=_HEADER, rows=rows)
-
-  def export_to_gsheet(
-      header: str,
-      rows: List,
-      output_gsheet_id: str,
-      output_worksheet_name: str,
-      output_gsheet_keyfile: str,
-  ):
-    if (
-        output_gsheet_id
-        and output_gsheet_id.strip()
-        and output_worksheet_name
-        and output_worksheet_name.strip()
-    ):
-      append_data_to_gsheet(
-          data={"header": header, "values": rows},
-          worksheet=output_worksheet_name,
-          gsheet_id=output_gsheet_id,
-          serviceAccountKeyFile=output_gsheet_keyfile,
-          # default_service_account_key_file(
-          #     args.project_id
-          # ),
-      )
-      print(
-          "\nSuccessfully published outputs of FIO test runs at worksheet"
-          f" '{args.output_worksheet_name}' in {url(args.output_gsheet_id)}"
-      )
-
   export_to_gsheet(
       output_gsheet_id=args.output_gsheet_id,
       output_worksheet_name=args.output_worksheet_name,
diff --git a/perfmetrics/scripts/testing_on_gke/examples/utils/parse_logs_common.py b/perfmetrics/scripts/testing_on_gke/examples/utils/parse_logs_common.py
index 309a111e14..371e6a3241 100644
--- a/perfmetrics/scripts/testing_on_gke/examples/utils/parse_logs_common.py
+++ b/perfmetrics/scripts/testing_on_gke/examples/utils/parse_logs_common.py
@@ -18,7 +18,8 @@
 import argparse
 import os
 import subprocess
-from typing import Tuple
+from typing import List, Tuple
+from utils.gsheet import append_data_to_gsheet, url
 from utils.utils import run_command
 
 SUPPORTED_SCENARIOS = [
@@ -155,3 +156,43 @@ def default_service_account_key_file(project_id: str) -> str:
     return "/usr/local/google/home/gargnitin/work/cloud/storage/client/gcsfuse/src/gcsfuse/perfmetrics/scripts/testing_on_gke/examples/20240919-gcs-fuse-test-ml-d6e0247b2cf1.json"
   else:
     raise Exception(f"Unknown project-id: {project_id}")
+
+
+def export_to_csv(output_file_path: str, header: str, rows: List):
+  if output_file_path and output_file_path.strip():
+    ensure_directory_exists(os.path.dirname(output_file_path))
+    with open(output_file_path, "a") as output_file_fwr:
+      # Write a new header.
+      output_file_fwr.write(f"{','.join(header)}\n")
+      for row in rows:
+        output_file_fwr.write(f"{','.join([f'{val}' for val in row])}\n")
+      output_file_fwr.close()
+    print(
+        "\nSuccessfully published outputs of test runs to"
+        f" {output_file_path} !!!"
+    )
+
+
+def export_to_gsheet(
+    header: str,
+    rows: List,
+    output_gsheet_id: str,
+    output_worksheet_name: str,
+    output_gsheet_keyfile: str,
+):
+  if (
+      output_gsheet_id
+      and output_gsheet_id.strip()
+      and output_worksheet_name
+      and output_worksheet_name.strip()
+  ):
+    append_data_to_gsheet(
+        data={"header": header, "values": rows},
+        worksheet=output_worksheet_name,
+        gsheet_id=output_gsheet_id,
+        serviceAccountKeyFile=output_gsheet_keyfile,
+    )
+    print(
+        "\nSuccessfully published outputs of test runs at worksheet"
+        f" '{output_worksheet_name}' in {url(output_gsheet_id)}"
+    )
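
Note (not part of the patch): a minimal sketch of how the two parse_logs.py scripts are expected to invoke the now-shared helpers from utils/parse_logs_common.py after this refactor. The header/row values and the gsheet id/keyfile strings below are illustrative placeholders; the keyword names mirror the new export_to_csv/export_to_gsheet signatures, but any call-site arguments not visible in the hunks above are assumptions.

    from utils.parse_logs_common import export_to_csv, export_to_gsheet

    # Illustrative header and parsed-result rows (placeholders, not real test output).
    _HEADER = ["File Size", "Read Type", "Scenario", "Duration (s)"]
    rows = [["100mb", "read", "gcsfuse-generic", 97]]

    # Appends a header line plus one comma-separated line per row to the output file.
    export_to_csv(output_file_path="output/results.csv", header=_HEADER, rows=rows)

    # Appends the same header and rows to a Google Sheets worksheet via append_data_to_gsheet.
    export_to_gsheet(
        header=_HEADER,
        rows=rows,
        output_gsheet_id="<output-gsheet-id>",
        output_worksheet_name="test-runs",
        output_gsheet_keyfile="<path-to-service-account-key.json>",
    )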