Unverified Commit fe234262 authored by Ziyue Yang's avatar Ziyue Yang Committed by GitHub
Browse files

Benchmarks: Micro benchmark - Support verification and parallel run for disk...


Benchmarks: Micro benchmark - Support verification and parallel run for disk performance benchmark (#741)

**Description**
Adds verification and parallel run support for disk performance
benchmark.

**Major Revision**
- Adds a `--verify` flag to support verifying written data.
- Supports loading benchmark options from the `PROC_RANK`, `BLOCK_DEVICES`
and `NUMA_NODES` environment variables.

---------
Co-authored-by: default avatarguoshzhao <guzhao@microsoft.com>
parent 86a940c1
...@@ -126,16 +126,51 @@ def add_parser_arguments(self): ...@@ -126,16 +126,51 @@ def add_parser_arguments(self):
help='Number of threads in %s test.' % io_str, help='Number of threads in %s test.' % io_str,
) )
def _preprocess(self): self._parser.add_argument(
'--verify',
type=str,
required=False,
help=(
'Verification method specified for fio --verify flag. '
'See https://fio.readthedocs.io/en/latest/fio_doc.html#cmdoption-arg-verify.'
),
)
def _get_arguments_from_env(self):
    """Read parallel-run environment variables and narrow the benchmark arguments.

    Reads 'PROC_RANK' (rank of the current process) and 'NUMA_NODES' (comma-separated
    NUMA node index per rank). When 'PROC_RANK' is set, block_devices is narrowed to
    the single device at that rank; when 'NUMA_NODES' is also set, numa is taken from
    the rank-th entry.
    Note: The config from env variables will overwrite the configs defined in the
    command line.

    Return:
        True if parsing succeeded; False on any bad value (e.g. rank out of range of
        the device list), in which case the return code is set to INVALID_ARGUMENT.
    """
    try:
        rank_env = os.getenv('PROC_RANK')
        # Keep rank as None (not 0) when PROC_RANK is absent so that a lone
        # NUMA_NODES still routes to the error path, matching the original
        # behavior where 'rank' was unbound in that case.
        rank = int(rank_env) if rank_env else None
        if rank is not None:
            self._args.block_devices = [self._args.block_devices[rank]]
        numa_env = os.getenv('NUMA_NODES')
        if numa_env:
            # Indexing with rank=None raises TypeError, deliberately caught below.
            self._args.numa = int(numa_env.split(',')[rank])
        return True
    except Exception as e:
        # Exception (not BaseException) so KeyboardInterrupt/SystemExit propagate.
        self._result.set_return_code(ReturnCode.INVALID_ARGUMENT)
        logger.error(
            'The proc_rank is out of index of devices - benchmark: {}, message: {}.'.format(self._name, str(e))
        )
        return False
def _preprocess(self): # noqa: C901
"""Preprocess/preparation operations before the benchmarking. """Preprocess/preparation operations before the benchmarking.
Return: Return:
True if _preprocess() succeed. True if _preprocess() succeed.
""" """
if not super()._preprocess(): if not super()._preprocess() or not self._get_arguments_from_env():
return False return False
fio_path = os.path.join(self._args.bin_dir, self._bin_name) fio_basic_command = os.path.join(self._args.bin_dir, self._bin_name)
if self._args.numa is not None:
fio_basic_command = f'numactl -N {self._args.numa} {fio_basic_command}'
if self._args.verify is not None:
fio_basic_command = f'{fio_basic_command} --verify={self._args.verify}'
for block_device in self._args.block_devices: for block_device in self._args.block_devices:
if not Path(block_device).is_block_device(): if not Path(block_device).is_block_device():
...@@ -144,13 +179,13 @@ def _preprocess(self): ...@@ -144,13 +179,13 @@ def _preprocess(self):
return False return False
if self._args.enable_seq_precond: if self._args.enable_seq_precond:
command = fio_path +\ command = fio_basic_command +\
' --filename=%s' % block_device +\ ' --filename=%s' % block_device +\
self.__fio_args['seq_precond'] self.__fio_args['seq_precond']
self._commands.append(command) self._commands.append(command)
if self._args.rand_precond_time > 0: if self._args.rand_precond_time > 0:
command = fio_path +\ command = fio_basic_command +\
' --filename=%s' % block_device +\ ' --filename=%s' % block_device +\
' --runtime=%ds' % self._args.rand_precond_time +\ ' --runtime=%ds' % self._args.rand_precond_time +\
self.__fio_args['rand_precond'] self.__fio_args['rand_precond']
...@@ -161,7 +196,7 @@ def _preprocess(self): ...@@ -161,7 +196,7 @@ def _preprocess(self):
io_str = '%s_%s' % (io_pattern, io_type) io_str = '%s_%s' % (io_pattern, io_type)
runtime = getattr(self._args, '%s_runtime' % io_str) runtime = getattr(self._args, '%s_runtime' % io_str)
if runtime > 0: if runtime > 0:
command = fio_path +\ command = fio_basic_command +\
' --filename=%s' % block_device +\ ' --filename=%s' % block_device +\
' --ramp_time=%ds' % getattr(self._args, '%s_ramp_time' % io_str) +\ ' --ramp_time=%ds' % getattr(self._args, '%s_ramp_time' % io_str) +\
' --runtime=%ds' % runtime +\ ' --runtime=%ds' % runtime +\
......
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
"""Tests for disk-performance benchmark.""" """Tests for disk-performance benchmark."""
import os
import unittest import unittest
from unittest import mock from unittest import mock
...@@ -126,6 +128,8 @@ def test_disk_performance_benchmark_enabled(self, mock_is_block_device): ...@@ -126,6 +128,8 @@ def test_disk_performance_benchmark_enabled(self, mock_is_block_device):
curr_test_magic += 1 curr_test_magic += 1
param_str += ' --%s_numjobs=%d' % (io_str, curr_test_magic) param_str += ' --%s_numjobs=%d' % (io_str, curr_test_magic)
curr_test_magic += 1 curr_test_magic += 1
# Verify
param_str += ' --verify=md5'
benchmark = benchmark_class(benchmark_name, parameters=param_str) benchmark = benchmark_class(benchmark_name, parameters=param_str)
# Check basic information # Check basic information
...@@ -148,10 +152,12 @@ def test_disk_performance_benchmark_enabled(self, mock_is_block_device): ...@@ -148,10 +152,12 @@ def test_disk_performance_benchmark_enabled(self, mock_is_block_device):
# Sequential precondition # Sequential precondition
assert ('--filename=%s' % block_device in benchmark._commands[command_idx]) assert ('--filename=%s' % block_device in benchmark._commands[command_idx])
assert ('--verify=md5' in benchmark._commands[command_idx])
command_idx += 1 command_idx += 1
# Random precondition # Random precondition
assert ('--filename=%s' % block_device in benchmark._commands[command_idx]) assert ('--filename=%s' % block_device in benchmark._commands[command_idx])
assert ('--runtime=%d' % curr_test_magic in benchmark._commands[command_idx]) assert ('--runtime=%d' % curr_test_magic in benchmark._commands[command_idx])
assert ('--verify=md5' in benchmark._commands[command_idx])
curr_test_magic += 1 curr_test_magic += 1
command_idx += 1 command_idx += 1
# Seq/rand read/write # Seq/rand read/write
...@@ -170,8 +176,64 @@ def test_disk_performance_benchmark_enabled(self, mock_is_block_device): ...@@ -170,8 +176,64 @@ def test_disk_performance_benchmark_enabled(self, mock_is_block_device):
curr_test_magic += 1 curr_test_magic += 1
if io_type == 'rw': if io_type == 'rw':
assert ('--rwmixread=%d' % default_rwmixread in benchmark._commands[command_idx]) assert ('--rwmixread=%d' % default_rwmixread in benchmark._commands[command_idx])
assert ('--verify=md5' in benchmark._commands[command_idx])
command_idx += 1 command_idx += 1
@mock.patch('pathlib.Path.is_block_device')
def test_disk_performance_env_parsing(self, mock_is_block_device):
    """Test disk-performance benchmark env parsing."""
    mock_is_block_device.return_value = True

    benchmark_name = 'disk-benchmark'
    benchmark_class, _ = BenchmarkRegistry._BenchmarkRegistry__select_benchmark(benchmark_name, Platform.CPU)
    assert (benchmark_class)

    # Valid environment: four ranks, one device and one NUMA node per rank.
    devices = ['/dev/nvme0n1', '/dev/nvme2n1', '/dev/nvme1n1', '/dev/nvme3n1']
    numa_per_rank = ['0', '0', '1', '1']
    os.environ['NUMA_NODES'] = ','.join(numa_per_rank)
    param_str = '--block_devices ' + ' '.join(devices)

    for rank in range(len(devices)):
        os.environ['PROC_RANK'] = str(rank)
        benchmark = benchmark_class(benchmark_name, parameters=param_str)
        assert (benchmark)
        assert (benchmark._preprocess() is True)
        assert (benchmark.return_code == ReturnCode.SUCCESS)
        assert (benchmark.name == 'disk-benchmark')
        assert (benchmark.type == BenchmarkType.MICRO)

        # Each rank produces exactly two commands (seq read + rand read),
        # pinned to its own NUMA node and block device.
        assert (2 == len(benchmark._commands))
        expected_numa = int(numa_per_rank[rank])
        assert (benchmark._args.numa == expected_numa)
        assert (benchmark._commands[0].startswith(f'numactl -N {expected_numa}'))
        for command in benchmark._commands:
            assert (f'--filename={devices[rank]}' in command)

    # Invalid environment: a rank beyond the device list must fail preprocessing.
    os.environ['PROC_RANK'] = '4'
    benchmark = benchmark_class(benchmark_name, parameters=param_str)
    assert (benchmark)
    assert (benchmark._preprocess() is False)
    assert (benchmark.return_code == ReturnCode.INVALID_ARGUMENT)
    assert (benchmark.name == 'disk-benchmark')
    assert (benchmark.type == BenchmarkType.MICRO)

    del os.environ['NUMA_NODES']
    del os.environ['PROC_RANK']
@decorator.load_data('tests/data/disk_performance.log') @decorator.load_data('tests/data/disk_performance.log')
def test_disk_performance_result_parsing(self, test_raw_output): def test_disk_performance_result_parsing(self, test_raw_output):
"""Test disk-performance benchmark result parsing.""" """Test disk-performance benchmark result parsing."""
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment