 #
 import logging
 import tempfile
+import datetime
 
 import redis
 
+from redisbench_admin.run.run import calculate_client_tool_duration_and_check
+from redisbench_admin.run_local.local_helpers import (
+    check_benchmark_binaries_local_requirements,
+    run_local_benchmark,
+)
+
 from redisbench_admin.environments.oss_cluster import (
     spin_up_local_redis_cluster,
     setup_redis_cluster_from_conns,
 )
 from redisbench_admin.environments.oss_standalone import spin_up_local_redis
 from redisbench_admin.run.cluster import cluster_init_steps
-from redisbench_admin.run.common import run_redis_pre_steps
+from redisbench_admin.run.common import (
+    run_redis_pre_steps,
+    check_dbconfig_tool_requirement,
+    prepare_benchmark_parameters,
+    dbconfig_keyspacelen_check,
+)
 from redisbench_admin.utils.benchmark_config import extract_redis_dbconfig_parameters
 from redisbench_admin.utils.local import (
     check_dataset_local_requirements,
@@ -101,15 +113,64 @@ def local_db_spin( |
             dataset_load_timeout_secs,
         )
 
-        for redis_process in redis_processes:
-            if is_process_alive(redis_process) is False:
-                raise Exception("Redis process is not alive. Failing test.")
-
         r = redis.StrictRedis(port=args.port)
         redis_conns.append(r)
+
+    for shardn, redis_process in enumerate(redis_processes):
+        logging.info(
+            "Checking if shard #{} process with pid={} is alive".format(
+                shardn + 1, redis_process.pid
+            )
+        )
+        if is_process_alive(redis_process) is False:
+            raise Exception("Redis process is not alive. Failing test.")
+
     if setup_type == "oss-cluster":
 
         cluster_init_steps(clusterconfig, redis_conns, local_module_file)
 
+    if check_dbconfig_tool_requirement(benchmark_config):
+        logging.info("Detected the requirements to load data via client tool")
+        local_benchmark_output_filename = "{}/load-data.txt".format(temporary_dir)
+        (
+            benchmark_tool,
+            full_benchmark_path,
+            benchmark_tool_workdir,
+        ) = check_benchmark_binaries_local_requirements(
+            benchmark_config, args.allowed_tools, "./binaries", "dbconfig"
+        )
+
+        # prepare the benchmark command
+        command, command_str = prepare_benchmark_parameters(
+            benchmark_config,
+            full_benchmark_path,
+            args.port,
+            "localhost",
+            local_benchmark_output_filename,
+            False,
+            benchmark_tool_workdir,
+            cluster_api_enabled,
+            "dbconfig",
+        )
+
+        # run the benchmark
+        load_via_benchmark_start_time = datetime.datetime.now()
+        run_local_benchmark(benchmark_tool, command)
+        load_via_benchmark_end_time = datetime.datetime.now()
+        load_via_benchmark_duration_seconds = calculate_client_tool_duration_and_check(
+            load_via_benchmark_end_time, load_via_benchmark_start_time
+        )
+        logging.info(
+            "Loading data via benchmark tool took {} secs.".format(
+                load_via_benchmark_duration_seconds
+            )
+        )
+
+        dbconfig_keyspacelen_check(
+            benchmark_config,
+            redis_conns,
+        )
+
     run_redis_pre_steps(benchmark_config, redis_conns[0], required_modules)
+
     return cluster_api_enabled, redis_conns, redis_processes
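
Side note on the new load step: it brackets the client-tool run with `datetime.datetime.now()` timestamps and logs the elapsed seconds. Below is a minimal standalone sketch of that timing pattern using only the standard library; `run_load_tool` and the `total_seconds()` arithmetic are illustrative stand-ins, not the actual `run_local_benchmark` / `calculate_client_tool_duration_and_check` implementations.

```python
import datetime
import subprocess


def run_load_tool(command):
    # Hypothetical stand-in for run_local_benchmark: run the load command and wait for it.
    subprocess.run(command, check=True)


start_time = datetime.datetime.now()
run_load_tool(["echo", "loading data via client tool (placeholder command)"])
end_time = datetime.datetime.now()

# Assumption: the helper reports the elapsed wall-clock seconds between end and start.
duration_seconds = (end_time - start_time).total_seconds()
print("Loading data via benchmark tool took {} secs.".format(duration_seconds))
```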