Commit eaee0c1

Add performance tests
Signed-off-by: Mariia Azbeleva <[email protected]>
azbeleva committed Oct 3, 2023
1 parent 22431a4 commit eaee0c1
Showing 6 changed files with 364 additions and 13 deletions.
15 changes: 2 additions & 13 deletions Robot-Framework/config/variables.robot
@@ -4,18 +4,6 @@
*** Settings ***
Library OperatingSystem

*** Variables ***
${DEVICE} ${DEVICE}

${SERIAL_PORT} ${EMPTY}
${DEVICE_IP_ADDRESS} ${EMPTY}
${SOCKET_IP_ADDRESS} ${EMPTY}
${LOGIN} ${EMPTY}
${PASSWORD} ${EMPTY}
${PLUG_USERNAME} ${EMPTY}
${PLUG_PASSWORD} ${EMPTY}
${netvm_service} ${EMPTY}

*** Keywords ***

Set Variables
@@ -25,7 +13,8 @@ Set Variables
Set Global Variable ${SERIAL_PORT} ${config['addresses']['${DEVICE}']['serial_port']}
Set Global Variable ${DEVICE_IP_ADDRESS} ${config['addresses']['${DEVICE}']['device_ip_address']}
Set Global Variable ${SOCKET_IP_ADDRESS} ${config['addresses']['${DEVICE}']['socket_ip_address']}
Set Global Variable ${netvm_service} [email protected]
Set Global Variable ${NETVM_SERVICE} [email protected]
Set Global Variable ${THREADS_NUMBER} ${config['addresses']['${DEVICE}']['threads']}

Read Config
[Arguments] ${file_path}=../config/test_config.json
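For reference, the Set Variables keyword above expects ../config/test_config.json to contain a per-device entry with at least the fields read here. A minimal sketch of that shape, expressed as a Python dict, is shown below; the device key "orin-agx" and all values are hypothetical and not taken from this commit:

# Hypothetical shape of test_config.json consumed by Read Config / Set Variables.
# The device key "orin-agx" and every value below are illustrative only.
import json

example_config = {
    "addresses": {
        "orin-agx": {
            "serial_port": "/dev/ttyUSB0",
            "device_ip_address": "192.168.1.10",
            "socket_ip_address": "192.168.1.20",
            "threads": 8
        }
    }
}

print(json.dumps(example_config, indent=4))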
196 changes: 196 additions & 0 deletions Robot-Framework/lib/PerformanceDataProcessing.py
@@ -0,0 +1,196 @@
# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)
# SPDX-License-Identifier: Apache-2.0

import csv
import os
import matplotlib.pyplot as plt
import logging
from robot.api.deco import keyword


class PerformanceDataProcessing:

def __init__(self, device):
# 'device' is the Robot Framework global variable ${DEVICE} passed in when the library is imported
self.data_dir = "../../../Performance_test_results/"
self.device = device

def _write_to_csv(self, test_name, data):
file_path = os.path.join(self.data_dir, f"{self.device}_{test_name}.csv")
logging.info(f"Writing data to {file_path}")
with open(file_path, 'a', newline='') as csvfile:
csvwriter = csv.writer(csvfile)
csvwriter.writerow(data)

@keyword
def write_cpu_to_csv(self, test_name, build_number, cpu_data):
data = [build_number,
cpu_data['cpu_events_per_second'],
cpu_data['min_latency'],
cpu_data['avg_latency'],
cpu_data['max_latency'],
cpu_data['cpu_events_per_thread'],
cpu_data['cpu_events_per_thread_stddev'],
self.device]
self._write_to_csv(test_name, data)

@keyword
def write_mem_to_csv(self, test_name, build_number, mem_data):
data = [build_number,
mem_data['operations_per_second'],
mem_data['data_transfer_speed'],
mem_data['min_latency'],
mem_data['avg_latency'],
mem_data['max_latency'],
mem_data['avg_events_per_thread'],
mem_data['events_per_thread_stddev'],
self.device]
self._write_to_csv(test_name, data)

@keyword
def read_cpu_csv_and_plot(self, test_name):
build_numbers = []
cpu_events_per_second = []
min_latency = []
avg_latency = []
max_latency = []
cpu_events_per_thread = []
cpu_events_per_thread_stddev = []

with open(f"{self.data_dir}{self.device}_{test_name}.csv", 'r') as csvfile:
csvreader = csv.reader(csvfile)
logging.info("Reading data from csv file...")
for row in csvreader:
if row[7] == self.device:
build_numbers.append(str(row[0]))
cpu_events_per_second.append(float(row[1]))
min_latency.append(float(row[2]))
avg_latency.append(float(row[3]))
max_latency.append(float(row[4]))
cpu_events_per_thread.append(float(row[5]))
cpu_events_per_thread_stddev.append(float(row[6]))

plt.figure(figsize=(20, 10))
plt.set_loglevel('WARNING')

# Plot 1: CPU Events per Second
plt.subplot(3, 1, 1)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, cpu_events_per_second, marker='o', linestyle='-', color='b')
plt.title('CPU Events per Second', loc='right', fontweight="bold")
plt.ylabel('CPU Events per Second')
plt.grid(True)
plt.xticks(build_numbers)

# Plot 2: CPU Events per Thread
plt.subplot(3, 1, 2)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, cpu_events_per_thread, marker='o', linestyle='-', color='b')
plt.title('CPU Events per Thread', loc='right', fontweight="bold")
plt.ylabel('CPU Events per Thread')
plt.grid(True)
plt.xticks(build_numbers)
# Create line chart with error bars on the same subplot
plt.errorbar(build_numbers, cpu_events_per_thread,
yerr=cpu_events_per_thread_stddev,
capsize=4)

# Plot 3: Latency
plt.subplot(3, 1, 3)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, avg_latency, marker='o', linestyle='-', color='b', label='Avg')
plt.ylabel('Avg Latency (ms)')
plt.legend(loc='upper left')
plt.xlabel('Build Number')
plt.twinx()
plt.plot(build_numbers, max_latency, marker='o', linestyle='-', color='r', label='Max')
plt.plot(build_numbers, min_latency, marker='o', linestyle='-', color='g', label='Min')
plt.ylabel('Max/Min Latency (ms)')
plt.legend(loc='upper right')
plt.title('Latency', loc='right', fontweight="bold")
plt.grid(True)
plt.xticks(build_numbers)

plt.suptitle(f'{test_name} ({self.device})', fontsize=16, fontweight='bold')

plt.tight_layout()
plt.savefig(f'../test-suites/{self.device}_{test_name}.png') # Save the plot as an image file

@keyword
def read_mem_csv_and_plot(self, test_name):
build_numbers = []
operations_per_second = []
data_transfer_speed = []
min_latency = []
avg_latency = []
max_latency = []
avg_events_per_thread = []
events_per_thread_stddev = []

with open(f"{self.data_dir}{self.device}_{test_name}.csv", 'r') as csvfile:
csvreader = csv.reader(csvfile)
logging.info("Reading data from csv file...")
for row in csvreader:
if row[8] == self.device:
build_numbers.append(str(row[0]))
operations_per_second.append(float(row[1]))
data_transfer_speed.append(float(row[2]))
min_latency.append(float(row[3]))
avg_latency.append(float(row[4]))
max_latency.append(float(row[5]))
avg_events_per_thread.append(float(row[6]))
events_per_thread_stddev.append(float(row[7]))

plt.figure(figsize=(20, 10))
plt.set_loglevel('WARNING')

# Plot 1: Operations Per Second
plt.subplot(3, 1, 1)
plt.ticklabel_format(axis='y', style='sci', useMathText=True)
plt.plot(build_numbers, operations_per_second, marker='o', linestyle='-', color='b')
plt.title('Operations per Second', loc='right', fontweight="bold")
plt.ylabel('Operations per Second')
plt.grid(True)
plt.xticks(build_numbers)

# Plot 2: Data Transfer Speed
plt.subplot(3, 1, 2)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, data_transfer_speed, marker='o', linestyle='-', color='b')
plt.title('Data Transfer Speed', loc='right', fontweight="bold")
plt.ylabel('Data Transfer Speed (MiB/sec)')
plt.grid(True)
plt.xticks(build_numbers)

# Plot 3: Latency
plt.subplot(3, 1, 3)
plt.ticklabel_format(axis='y', style='plain')
plt.plot(build_numbers, avg_latency, marker='o', linestyle='-', color='b', label='Avg')
plt.ylabel('Avg Latency (ms)')
plt.legend(loc='upper left')
plt.grid(True)
plt.xlabel('Build Number')
plt.twinx()
plt.plot(build_numbers, max_latency, marker='o', linestyle='-', color='r', label='Max')
plt.plot(build_numbers, min_latency, marker='o', linestyle='-', color='g', label='Min')
plt.ylabel('Max/Min Latency (ms)')
plt.legend(loc='upper right')
plt.title('Latency', loc='right', fontweight="bold")
plt.xticks(build_numbers)

plt.suptitle(f'{test_name} ({self.device})', fontsize=16, fontweight='bold')

plt.tight_layout()
plt.savefig(f'../test-suites/{self.device}_{test_name}.png') # Save the plot as an image file

@keyword
def save_cpu_data(self, test_name, build_number, cpu_data):

self.write_cpu_to_csv(test_name, build_number, cpu_data)
self.read_cpu_csv_and_plot(test_name)

@keyword
def save_memory_data(self, test_name, build_number, mem_data):

self.write_mem_to_csv(test_name, build_number, mem_data)
self.read_mem_csv_and_plot(test_name)
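
Outside Robot Framework, the library above can also be driven directly from Python. The sketch below appends one hypothetical CPU result row and regenerates the comparison plot; the device name, build number and measurement values are made up, and it assumes the ../../../Performance_test_results/ and ../test-suites/ directories already exist relative to the working directory:

# Minimal standalone usage sketch (not part of this commit); all values are illustrative.
from PerformanceDataProcessing import PerformanceDataProcessing

pdp = PerformanceDataProcessing("orin-agx")
cpu_data = {
    'cpu_events_per_second': 1456.32,
    'min_latency': 0.68,
    'avg_latency': 0.69,
    'max_latency': 1.92,
    'cpu_events_per_thread': 14563.0,
    'cpu_events_per_thread_stddev': 0.0,
}
# Appends the row [build, events/s, min, avg, max, events/thread, stddev, device]
# to orin-agx_CPU One thread test.csv and redraws the plot PNG.
pdp.save_cpu_data("CPU One thread test", 42, cpu_data)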
59 changes: 59 additions & 0 deletions Robot-Framework/lib/output_parser.py
@@ -48,3 +48,62 @@ def verify_date_format(date_string):
datetime.strptime(date_string, '%Y%m%d')
except ValueError:
raise Exception("Wrong date format in version date field")

def parse_cpu_results(output):
def extract_value(pattern, output):
match = re.search(pattern, output)
if match:
return match.group(1)
else:
raise Exception(f"Couldn't parse result of the test with pattern: {pattern}")

output = re.sub(r'\033\[.*?m', '', output) # remove colors from serial console output

cpu_events_per_second = extract_value(r'events per second:\s*([.\d]+)', output)
min_latency = extract_value(r'min:\s+([.\d]+)', output)
max_latency = extract_value(r'max:\s+([.\d]+)', output)
avg_latency = extract_value(r'avg:\s+([.\d]+)', output)
cpu_events_per_thread = extract_value(r'events \(avg\/stddev\):\s+([.\d]+)', output)
cpu_events_per_thread_stddev = extract_value(r'events \(avg\/stddev\):\s+[.\d]+\/([.\d]+)', output)

cpu_data = {
'cpu_events_per_second': cpu_events_per_second,
'min_latency': min_latency,
'max_latency': max_latency,
'avg_latency': avg_latency,
'cpu_events_per_thread': cpu_events_per_thread,
'cpu_events_per_thread_stddev': cpu_events_per_thread_stddev
}

return cpu_data

def parse_memory_results(output):
def extract_value(pattern, output):
match = re.search(pattern, output)
if match:
return match.group(1)
else:
raise Exception(f"Couldn't parse result of the test with pattern: {pattern}")

output = re.sub(r'\033\[.*?m', '', output) # remove colors from serial console output

operations_per_second = extract_value(r'Total operations:\s*\d+ \(([.\d]+) per second', output)
data_transfer_speed = extract_value(r'\(([.\d]+) MiB\/sec\)', output)
min_latency = extract_value(r'min:\s+([.\d]+)', output)
max_latency = extract_value(r'max:\s+([.\d]+)', output)
avg_latency = extract_value(r'avg:\s+([.\d]+)', output)
avg_events_per_thread = extract_value(r'events \(avg\/stddev\):\s+([.\d]+)', output)
events_per_thread_stddev = extract_value(r'events \(avg\/stddev\):\s+[.\d]+\/([.\d]+)', output)

mem_data = {
'operations_per_second': operations_per_second,
'data_transfer_speed': data_transfer_speed,
'min_latency': min_latency,
'max_latency': max_latency,
'avg_latency': avg_latency,
'avg_events_per_thread': avg_events_per_thread,
'events_per_thread_stddev': events_per_thread_stddev
}

return mem_data
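
To illustrate what the regular expressions above match, the snippet below runs both parsers on trimmed, hand-written samples of sysbench output (not captured from a real run); it assumes output_parser.py is importable from the working directory and already imports re at module level, as the functions above require:

# Illustrative parser check (not part of this commit); the sysbench output is a hand-written sample.
from output_parser import parse_cpu_results, parse_memory_results

cpu_sample = """
CPU speed:
    events per second:  1456.32

Latency (ms):
         min:                                    0.68
         avg:                                    0.69
         max:                                    1.92

Threads fairness:
    events (avg/stddev):           14563.0000/0.00
"""

mem_sample = """
Total operations: 55151485 (5514521.99 per second)

53858.81 MiB transferred (5385.28 MiB/sec)

Latency (ms):
         min:                                    0.00
         avg:                                    0.00
         max:                                    0.23

Threads fairness:
    events (avg/stddev):           55151485.0000/0.00
"""

print(parse_cpu_results(cpu_sample))     # values come back as strings, e.g. '1456.32'
print(parse_memory_results(mem_sample))  # e.g. {'operations_per_second': '5514521.99', ...}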

13 changes: 13 additions & 0 deletions Robot-Framework/resources/ssh_keywords.resource
@@ -223,3 +223,16 @@ Check if ssh is ready on netvm
Sleep 1
END
IF ${status} == False FAIL Port 22 of NetVM is not ready after ${timeout}

Install sysbench tool
${command_output}= Execute Command nix-env --query --installed
${not_installed} = Run Keyword And Return Status Should Not Contain ${command_output} sysbench
IF ${not_installed}
Execute Command nix-env -i sysbench
${command_output}= Execute Command nix-env --query --installed
Log To Console ${\n}Installed packages:${\n}${command_output}
Should Contain ${command_output} sysbench sysbench tool was not installed
Log To Console sysbench tool was successfully installed
ELSE
Log To Console ${\n}sysbench tool was already installed
END
93 changes: 93 additions & 0 deletions Robot-Framework/test-suites/performance/performance.robot
@@ -0,0 +1,93 @@
# SPDX-FileCopyrightText: 2022-2023 Technology Innovation Institute (TII)
# SPDX-License-Identifier: Apache-2.0

*** Settings ***
Documentation Gathering performance data
Force Tags performance
Resource ../../resources/ssh_keywords.resource
Resource ../../config/variables.robot
Library ../../lib/output_parser.py
Library ../../lib/PerformanceDataProcessing.py ${DEVICE}
Suite Setup Common Setup
Suite Teardown Close All Connections

*** Test Cases ***

CPU One thread test
[Documentation] Run a CPU benchmark using Sysbench for 10 seconds with a SINGLE thread.
... The benchmark records CPU events per second, events per thread, and latency data to CSV.
... Visual plots are created to compare these metrics with previous test runs.
[Tags] performance cpu SP-T67-1
${output} Execute Command sysbench cpu --time=10 --threads=1 --cpu-max-prime=20000 run
Log ${output}
&{cpu_data} Parse Cpu Results ${output}
Save Cpu Data ${TEST NAME} ${buildID} ${cpu_data}
Log <img src="${DEVICE}_${TEST NAME}.png" alt="CPU Plot" width="1200"> HTML

CPU multiple threads test
[Documentation] Run a CPU benchmark using Sysbench for 10 seconds with MULTIPLE threads.
... The benchmark records CPU events per second, events per thread, and latency data to CSV.
... Visual plots are created to compare these metrics with previous test runs.
[Tags] performance cpu SP-T67-2
${output} Execute Command sysbench cpu --time=10 --threads=${threads_number} --cpu-max-prime=20000 run
Log ${output}
&{cpu_data} Parse Cpu Results ${output}
Save Cpu Data ${TEST NAME} ${buildID} ${cpu_data}
Log <img src="${DEVICE}_${TEST NAME}.png" alt="CPU Plot" width="1200"> HTML

Memory Read One thread test
[Documentation] Run a memory benchmark using Sysbench for 60 seconds with a SINGLE thread.
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for READ operations.
... Visual plots are created to compare these metrics with previous test runs.
[Tags] performance mem SP-T67-3
${output} Execute Command sysbench memory --time=60 --memory-oper=read --threads=1 run
Log ${output}
&{mem_data} Parse Memory Results ${output}
Save Memory Data ${TEST NAME} ${buildID} ${mem_data}
Log <img src="${DEVICE}_${TEST NAME}.png" alt="Mem Plot" width="1200"> HTML

Memory Write One thread test
[Documentation] Run a memory benchmark using Sysbench for 60 seconds with a SINGLE thread.
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for WRITE operations.
... Visual plots are created to compare these metrics with previous test runs.
[Tags] performance mem SP-T67-4
${output} Execute Command sysbench memory --time=60 --memory-oper=write --threads=1 run
Log ${output}
&{mem_data} Parse Memory Results ${output}
Save Memory Data ${TEST NAME} ${buildID} ${mem_data}
Log <img src="${DEVICE}_${TEST NAME}.png" alt="Mem Plot" width="1200"> HTML

Memory Read multiple threads test
[Documentation] Run a memory benchmark using Sysbench for 60 seconds with MULTIPLE threads.
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for READ operations.
... Visual plots are created to compare these metrics with previous test runs.
[Tags] performance mem SP-T67-5
${output} Execute Command sysbench memory --time=60 --memory-oper=read --threads=${threads_number} run
Log ${output}
&{mem_data} Parse Memory Results ${output}
Save Memory Data ${TEST NAME} ${buildID} ${mem_data}
Log <img src="${DEVICE}_${TEST NAME}.png" alt="Mem Plot" width="1200"> HTML

Memory Write multiple threads test
[Documentation] Run a memory benchmark using Sysbench for 60 seconds with MULTIPLE threads.
... The benchmark records Operations Per Second, Data Transfer Speed, Average Events per Thread,
... and Latency for WRITE operations.
... Visual plots are created to compare these metrics with previous test runs.
[Tags] performance mem SP-T67-6
${output} Execute Command sysbench memory --time=60 --memory-oper=write --threads=${threads_number} run
Log ${output}
&{mem_data} Parse Memory Results ${output}
Save Memory Data ${TEST NAME} ${buildID} ${mem_data}
Log <img src="${DEVICE}_${TEST NAME}.png" alt="Mem Plot" width="1200"> HTML



*** Keywords ***

Common Setup
Set Variables ${DEVICE}
Connect
Install sysbench tool
