2
2
import os
3
3
import json
4
4
import statistics
5
- import subprocess
6
5
import pytest
7
6
import logging
8
7
from datetime import datetime
9
8
10
- from helpers import load_results_from_csv , log_result_csv , mean
9
+ from lib .commands import SSHCommandFailed
10
+ from .helpers import load_results_from_csv , log_result_csv , mean
# Tests default settings #

# Results CSV is timestamped at import time so each pytest session gets its own file.
CSV_FILE = f"/tmp/results_{datetime.now().strftime('%Y-%m-%d_%H:%M:%S')}.csv"

DEFAULT_SAMPLES_NUM = 10
DEFAULT_SIZE = "1G"
DEFAULT_BS = "4k"
DEFAULT_IODEPTH = 1
DEFAULT_NUMJOBS = 1
DEFAULT_FILE = "fio-testfile"

# Tests parameters #

# Total physical RAM in bytes; used below to size a test file larger than RAM
# so reads cannot be served purely from the page cache.
system_memory = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')

block_sizes = ("4k", "16k", "64k", "1M")
# Last entry is twice the physical RAM, truncated to whole GiB (e.g. "64G").
file_sizes = ("1G", "4G", f"{int((system_memory // (1024. ** 3)) * 2)}G")

modes = (
    "read",
    "randread",
    "write",
    "randwrite"
)

# End of tests parameters #
39
def run_fio (
40
+ vm ,
45
41
test_name ,
46
42
rw_mode ,
47
43
temp_dir ,
44
+ local_temp_dir ,
48
45
bs = DEFAULT_BS ,
49
46
iodepth = DEFAULT_IODEPTH ,
50
47
size = DEFAULT_SIZE ,
48
+ numjobs = DEFAULT_NUMJOBS ,
51
49
file_path = "" ,
52
50
):
53
51
json_output_path = os .path .join (temp_dir , f"{ test_name } .json" )
52
+ local_json_path = os .path .join (local_temp_dir , f"{ test_name } .json" )
54
53
if not file_path :
55
54
file_path = os .path .join (temp_dir , DEFAULT_FILE )
56
55
fio_cmd = [
@@ -64,17 +63,19 @@ def run_fio(
64
63
"--direct=1" ,
65
64
"--end_fsync=1" ,
66
65
"--fsync_on_close=1" ,
67
- "--numjobs=1 " ,
66
+ f "--numjobs={ numjobs } " ,
68
67
"--group_reporting" ,
69
68
"--output-format=json" ,
70
69
f"--output={ json_output_path } "
71
70
]
72
-
73
- result = subprocess .run (fio_cmd , capture_output = True , text = True )
74
- if result .returncode != 0 :
75
- raise RuntimeError (f"fio failed for { test_name } :\n { result .stderr } " )
76
-
77
- with open (json_output_path ) as f :
71
+ logging .debug (f"Running { fio_cmd } " )
72
+ try :
73
+ vm .ssh (fio_cmd , check = True )
74
+ except SSHCommandFailed as e :
75
+ raise RuntimeError (f"fio failed for { test_name } :{ e } " )
76
+ vm .scp (json_output_path , local_json_path , local_dest = True )
77
+ logging .debug (f"Stored json at { local_json_path } " )
78
+ with open (local_json_path ) as f :
78
79
return json .load (f )
79
80
80
81
def assert_performance_not_degraded (current , previous , threshold = 10 ):
@@ -86,7 +87,7 @@ def assert_performance_not_degraded(current, previous, threshold=10):
86
87
except statistics .StatisticsError :
87
88
logging .info (f"Missing metric ({ metric } ), skipping comparison" )
88
89
continue
89
- diff = (curr - prev if metric == "latency" else prev - curr ) / (prev * 100 )
90
+ diff = (curr - prev if metric == "latency" else prev - curr ) / (prev * 100 )
90
91
assert diff <= threshold , \
91
92
f"{ metric } changed by { diff :.2f} % (allowed { threshold } %)"
92
93
diffs [metric ] = diff
@@ -97,27 +98,46 @@ def assert_performance_not_degraded(current, previous, threshold=10):
97
98
logging .info (f"- { k } : { sign } { abs (v ):.2f} %" )
class TestDiskPerf:
    # One parametrized case per (block size, file size, I/O pattern) combination.
    test_cases = itertools.product(block_sizes, file_sizes, modes)

    @pytest.mark.parametrize("block_size,file_size,rw_mode", test_cases)
    def test_disk_benchmark(
        self,
        temp_dir,
        local_temp_dir,
        prev_results,
        block_size,
        file_size,
        rw_mode,
        running_unix_vm_with_fio,
        plugged_vbd,
        image_format
    ):
        """Benchmark the plugged VBD with fio and guard against regressions.

        Runs fio DEFAULT_SAMPLES_NUM times against the raw block device
        backing the plugged VBD, logs each sample to CSV_FILE, and — when a
        previous results set is available for the same case — asserts that
        performance has not degraded.
        """
        vm = running_unix_vm_with_fio
        vbd = plugged_vbd
        # Benchmark the raw device directly, not a file on a mounted filesystem.
        device = f"/dev/{vbd.param_get(param_name='device')}"
        test_type = "{}-{}-{}-{}".format(block_size, file_size, rw_mode, image_format)

        for _ in range(DEFAULT_SAMPLES_NUM):
            result = run_fio(
                vm,
                test_type,
                rw_mode,
                temp_dir,
                local_temp_dir,
                file_path=device,
                bs=block_size,
                size=file_size
            )
            summary = log_result_csv(test_type, rw_mode, result, CSV_FILE)
            assert summary["IOPS"] > 0
        key = (test_type, rw_mode)
        if prev_results and key in prev_results:
            # Only reload the CSV when there is actually something to compare against.
            results = load_results_from_csv(CSV_FILE)
            assert_performance_not_degraded(results[key], prev_results[key])