metrics: FIO ci test enablement
This PR enables the new containerd-client-based FIO test, which is used
to track I/O metrics in the kata-ci environment.

Additionally, this PR fixes the parsing of the results.

Fixes: kata-containers#8199

Signed-off-by: David Esparza <david.esparza.borquez@intel.com>
dborquez committed Oct 30, 2023
1 parent 873386a commit 1626253
Showing 5 changed files with 143 additions and 26 deletions.
@@ -98,6 +98,58 @@ midval = 98.0
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure sequential read throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results sequential\"] | .[] | .[] | .read.bw | select( . != null )"
checktype = "mean"
midval = 312776
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure sequential write throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results sequential\"] | .[] | .[] | .write.bw | select( . != null )"
checktype = "mean"
midval = 307948
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure random read throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results random\"] | .[] | .[] | .randread.bw | select( . != null )"
checktype = "mean"
midval = 1351339
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure random write throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results random\"] | .[] | .[] | .randwrite.bw | select( . != null )"
checktype = "mean"
midval = 1440540.7
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "latency"
type = "json"
@@ -98,6 +98,58 @@ midval = 98.0
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure sequential read throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results sequential\"] | .[] | .[] | .read.bw | select( . != null )"
checktype = "mean"
midval = 327066.8
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure sequential write throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results sequential\"] | .[] | .[] | .write.bw | select( . != null )"
checktype = "mean"
midval = 309023.65
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure random read throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results random\"] | .[] | .[] | .randread.bw | select( . != null )"
checktype = "mean"
midval = 1301793.45
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "fio"
type = "json"
description = "measure random write throughput using fio"
# Min and Max values to set a 'range' that
# the mean of the JSON results data must fall
# within (inclusive)
checkvar = "[.\"fio\".\"Results random\"] | .[] | .[] | .randwrite.bw | select( . != null )"
checktype = "mean"
midval = 1457926.8
minpercent = 20.0
maxpercent = 20.0

[[metric]]
name = "latency"
type = "json"
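For context, here is a minimal sketch of how the checkvar filters above behave. The sample JSON is hand-written and only assumes the layout that fio_test.sh appears to emit (the test name wrapped around the "Results sequential" / "Results random" arrays, with bandwidth values quoted as strings); it is not real tool output.

    # Illustrative only: fabricate a tiny results document and run the
    # sequential-read checkvar filter from the config above against it.
    echo '{"fio":{"Results sequential":[{"read":{"bw":"312776","units":"KB/s"}},{"write":{"bw":"307948","units":"KB/s"}}]}}' \
      | jq '[."fio"."Results sequential"] | .[] | .[] | .read.bw | select( . != null )'
    # prints: "312776"

The select( . != null ) clause is what lets a single filter walk every element of the results array and keep only the entries that actually carry a read (or write) block.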
4 changes: 2 additions & 2 deletions tests/metrics/gha-run.sh
@@ -84,9 +84,9 @@ function run_test_tensorflow() {
}

function run_test_fio() {
- info "Skipping FIO test temporarily using ${KATA_HYPERVISOR} hypervisor"
+ info "Running FIO test using ${KATA_HYPERVISOR} hypervisor"

- # bash tests/metrics/storage/fio-k8s/fio-test-ci.sh
+ bash tests/metrics/storage/fio_test.sh
}

function run_test_iperf() {
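For local runs, run_test_fio is reached through gha-run.sh's dispatcher; the exact subcommand name is not visible in this diff, so the invocation below is an assumption based on the function name rather than something this patch confirms.

    # Assumed invocation; 'run-test-fio' and the hypervisor value are not confirmed by this diff.
    KATA_HYPERVISOR=qemu bash tests/metrics/gha-run.sh run-test-fio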
6 changes: 3 additions & 3 deletions tests/metrics/storage/fio-dockerfile/workload/fio_bench.sh
@@ -71,7 +71,7 @@ function launch_workload() {
local test_name="${io_type}_${block_size}_nj-${num_jobs}_${rate_process}_iodepth-${iodepth}_io-direct-${disable_buffered}"

setup_workload
- rm -f "${summary_file_local}" > /dev/null 2>&1
+ rm -f "${summary_file_local}" >/dev/null 2>&1
fio \
--name="${test_name}" \
--output-format="json" \
@@ -88,12 +88,12 @@
--iodepth="${iodepth}" \
--gtod_reduce="1" \
--randrepeat="1" \
- | tee -a ${summary_file_local} > /dev/null 2>&1
+ --output "${summary_file_local}" >/dev/null 2>&1
}

function print_latest_results() {
[ ! -f "${summary_file_local}" ] && echo "Error: no results to display; you must run a test before requesting results display" && exit 1
- echo "$(cat ${summary_file_local})"
+ cat "${summary_file_local}"
}

function delete_workload() {
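The switch from piping fio's report through tee -a to fio's own --output flag means the summary file is rewritten with a single, clean JSON document on every run, which is what makes the later jq parsing reliable. Below is a minimal standalone sketch of the same pattern; the file names and job parameters are made up for illustration and are not taken from this patch.

    # Hypothetical paths/sizes; demonstrates --output-format=json plus --output.
    summary_file="/tmp/fio_demo_summary.json"
    rm -f "${summary_file}" >/dev/null 2>&1

    fio \
      --name="seqread-4k-demo" \
      --filename="/tmp/fio-demo.dat" \
      --rw="read" \
      --bs="4k" \
      --size="64M" \
      --output-format="json" \
      --output "${summary_file}" >/dev/null 2>&1

    # The file now holds exactly one JSON report, so it can be queried directly:
    jq '.jobs[].read.bw' "${summary_file}"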
55 changes: 34 additions & 21 deletions tests/metrics/storage/fio_test.sh
@@ -19,24 +19,31 @@ DOCKERFILE="${SCRIPT_PATH}/fio-dockerfile/Dockerfile"
PAYLOAD_ARGS="${PAYLOAD_ARGS:-tail -f /dev/null}"
TEST_NAME="fio"
REQUIRED_CMDS=("jq" "script")
+ TMP_DIR=$(mktemp --tmpdir -d fio.XXXXXXXXXX)
+ results_file="${TMP_DIR}/fio_results.json"
+ results_read=""
+ results_write=""

# Fio default number of jobs
nj=4

function release_resources() {
- sudo -E "${CTR_EXE}" t exec --exec-id "$(random_name)" ${CONTAINER_ID} sh -c "./fio_bench.sh delete-workload"
+ sudo -E "${CTR_EXE}" t exec --exec-id "$(random_name)" "${CONTAINER_ID}" sh -c "./fio_bench.sh delete-workload"
sudo -E "${CTR_EXE}" t kill -a -s SIGKILL "${CONTAINER_ID}"
sudo -E "${CTR_EXE}" c rm "${CONTAINER_ID}"
+ rm -rf "${TMP_DIR}"
sleep 0.5
clean_env_ctr
- info "fio test end"
+ info "removing containers done"
}

trap release_resources EXIT

function setup() {
info "setup fio test"
clean_env_ctr
check_cmds "${REQUIRED_CMDS[@]}"
check_ctr_images "$IMAGE" "$DOCKERFILE"
clean_env_ctr
init_env

# drop caches
@@ -108,7 +115,7 @@ function convert_results_to_json() {
"bw_stddev" : "${bw_stddev}",
"iops" : "${iops}",
"iops_stddev" : "${iops_stddev}",
- "units" : "Kb"
+ "units" : "KB/s"
}
}
EOF
@@ -117,46 +124,52 @@ EOF
}

function store_results() {
- local data_r="${1}"
- local data_w="${2}"
- local title="${3}"
+ local title="${1}"

- [ -z "${data_r}" ] || [ -z "${data_w}" ] || [ -z "${title}" ] && die "Missing data and/or title when trying storing results."
+ [ -z "${results_read}" ] || [ -z "${results_write}" ] || [ -z "${title}" ] && die "Missing data and/or title when trying to store results."

metrics_json_start_array
- extract_test_params "${data_r}"
- parse_results "${data_r}"
- parse_results "${data_w}"
+ extract_test_params "${results_read}"
+ parse_results "${results_read}"
+ parse_results "${results_write}"
metrics_json_end_array "${title}"
}

function main() {
setup

- # Collect bs=4K, num_jobs=4, io-direct, io-depth=2
+ # Collect bs=4K, num_jobs=4, io-direct, io-depth=8
info "Processing sequential type workload"
sudo -E "${CTR_EXE}" t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh run-read-4k ${nj}" >/dev/null 2>&1
- local results_read_4K="$(script -qc "sudo -E ${CTR_EXE} t exec -t --exec-id ${RANDOM} ${CONTAINER_ID} sh -c './fio_bench.sh print-latest-results'")"
+ sudo -E ${CTR_EXE} t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh print-latest-results" >"${results_file}"
+ results_read=$(<"${results_file}")

sleep 0.5
sudo -E "${CTR_EXE}" t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh run-write-4k ${nj}" >/dev/null 2>&1
- local results_write_4K="$(script -qc "sudo -E ${CTR_EXE} t exec -t --exec-id ${RANDOM} ${CONTAINER_ID} sh -c './fio_bench.sh print-latest-results'")"
+ sudo -E ${CTR_EXE} t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh print-latest-results" >"${results_file}"
+ results_write=$(<"${results_file}")

+ # parse results sequential
+ metrics_json_init
+ store_results "Results sequential"

- # Collect bs=64K, num_jobs=4, io-direct, io-depth=2
+ # Collect bs=64K, num_jobs=4, io-direct, io-depth=8
info "Processing random type workload"
sleep 0.5
sudo -E "${CTR_EXE}" t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh run-randread-64k ${nj}" >/dev/null 2>&1
- local results_rand_read_64K="$(script -qc "sudo -E ${CTR_EXE} t exec -t --exec-id ${RANDOM} ${CONTAINER_ID} sh -c './fio_bench.sh print-latest-results'")"
+ sudo -E ${CTR_EXE} t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh print-latest-results" >"${results_file}"
+ results_read=$(<"${results_file}")

sleep 0.5
sudo -E "${CTR_EXE}" t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh run-randwrite-64k ${nj}" >/dev/null 2>&1
- local results_rand_write_64K="$(script -qc "sudo -E ${CTR_EXE} t exec -t --exec-id ${RANDOM} ${CONTAINER_ID} sh -c './fio_bench.sh print-latest-results'")"
+ sudo -E ${CTR_EXE} t exec --exec-id "${RANDOM}" ${CONTAINER_ID} sh -c "./fio_bench.sh print-latest-results" >"${results_file}"
+ results_write=$(<"${results_file}")

- # parse results
- metrics_json_init
- store_results "${results_read_4K}" "${results_write_4K}" "Results sequential"
- store_results "${results_rand_read_64K}" "${results_rand_write_64K}" "Results random"
+ # parse results random
+ store_results "Results random"
metrics_json_save
}

main "$@"
+ info "fio test end"

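Since the parsing fix hinges on capturing the container's report without a pseudo-TTY, here is a reduced sketch of the new capture pattern; the container ID and paths are placeholders, not values from this patch.

    # Placeholders throughout; mirrors the redirect-to-file capture used above.
    results_file="$(mktemp --tmpdir fio_results.XXXXXXXXXX)"

    sudo -E ctr t exec --exec-id "demo-${RANDOM}" "my-fio-container" \
        sh -c "./fio_bench.sh print-latest-results" > "${results_file}"

    # Unlike the previous 'script -qc' capture, a plain redirect adds no
    # terminal control characters, so the file should already be valid JSON:
    jq -e . "${results_file}" > /dev/null || echo "results are not valid JSON"

    results_read=$(<"${results_file}")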