|
22 | 22 | ) |
23 | 23 |
|
24 | 24 |
|
def establish_connection():
    """Build an OpenStack Connection for the cloud selected by args.os_cloud.

    When --clouds-yaml is given, configuration is loaded from that file;
    otherwise the default openstack config lookup is used.
    """
    if args.clouds_yaml is not None:
        LOGGER.info(f"Loading connection configuration from {args.clouds_yaml}")
        config = loader.OpenStackConfig(config_files=[args.clouds_yaml])
    else:
        config = loader.OpenStackConfig()
    return Connection(config=config.get_one(args.os_cloud))
def generated_clouds_yaml():
    """Write the collected cloud entries to args.generate_clouds_yaml.

    If the target file already exists, it is first backed up to a
    timestamped copy and its entries are merged with the newly generated
    ones via deep_merge_dict before writing.
    """
    LOGGER.info(f"Creating a clouds yaml : {args.generate_clouds_yaml}")
    clouds_yaml_data_new = {"clouds": clouds_yaml_data}
    initial_entries = 0
    # Count up front so the summary log is correct even when no file
    # existed before (previously these stayed 0 in that case).
    generated_entries = len(clouds_yaml_data_new.get("clouds", []))
    total_entries = generated_entries
    if os.path.exists(args.generate_clouds_yaml):
        with open(args.generate_clouds_yaml, "r") as file:
            # safe_load returns None for an empty file; treat as no entries.
            existing_data = yaml.safe_load(file) or {}

        initial_entries = len(existing_data.get("clouds", []))
        backup_file = f"{args.generate_clouds_yaml}_{iso_timestamp()}"
        # Use the module LOGGER (not the root logging module) for consistency.
        LOGGER.warning(
            f"File {args.generate_clouds_yaml} exists, making a backup to {backup_file} and adding the new values"
        )
        # Reuse backup_file: calling iso_timestamp() a second time could
        # yield a name different from the one just logged.
        shutil.copy2(args.generate_clouds_yaml, backup_file)

        clouds_yaml_data_new = deep_merge_dict(existing_data, clouds_yaml_data_new)
        total_entries = len(clouds_yaml_data_new.get("clouds", []))
    with open(args.generate_clouds_yaml, "w") as file:
        yaml.dump(
            clouds_yaml_data_new,
            file,
            default_flow_style=False,
            explicit_start=True,
        )
    LOGGER.info(
        f"Generated {generated_entries} entries, number of entries in "
        f"{args.generate_clouds_yaml} is now {total_entries} (old {initial_entries} entries)"
    )
| 70 | + |
# Root logger; level/handlers are configured later via setup_logging().
LOGGER = logging.getLogger()

parser = argparse.ArgumentParser(prog="Create workloads on openstack installations")
|
141 | 187 |
|
# Apply the requested log level to the root logger configured above.
setup_logging(args.log_level)
|
144 | | - |
145 | | -def establish_connection(): |
146 | | - if args.clouds_yaml is None: |
147 | | - config = loader.OpenStackConfig() |
148 | | - else: |
149 | | - LOGGER.info(f"Loading connection configuration from {args.clouds_yaml}") |
150 | | - config = loader.OpenStackConfig(config_files=[args.clouds_yaml]) |
151 | | - cloud_config = config.get_one(args.os_cloud) |
152 | | - return Connection(config=cloud_config) |
153 | | - |
154 | | - |
# Start of the overall run timer (presumably reported at exit — confirm below this view).
time_start = time.time()

Config.load_config(args.config)
Config.show_effective_config()
159 | 194 |
|
| 195 | + |
160 | 196 | if args.create_domains: |
161 | 197 | conn = establish_connection() |
162 | 198 | workload_domains: dict[str, WorkloadGeneratorDomain] = dict() |
@@ -191,42 +227,8 @@ def establish_connection(): |
191 | 227 | machine_obj.delete_machine() |
192 | 228 |
|
193 | 229 | if args.generate_clouds_yaml: |
194 | | - LOGGER.info(f"Creating a clouds yaml : {args.generate_clouds_yaml}") |
195 | | - clouds_yaml_data_new = {"clouds": clouds_yaml_data} |
196 | | - |
197 | | - initial_entries = 0 |
198 | | - generated_entries = 0 |
199 | | - total_entries = 0 |
200 | | - |
201 | | - if os.path.exists(args.generate_clouds_yaml): |
202 | | - with open(args.generate_clouds_yaml, "r") as file: |
203 | | - existing_data = yaml.safe_load(file) |
204 | | - |
205 | | - initial_entries = len(existing_data.get("clouds", [])) |
206 | | - backup_file = f"{args.generate_clouds_yaml}_{iso_timestamp()}" |
207 | | - logging.warning( |
208 | | - f"File {args.generate_clouds_yaml}, making an backup to {backup_file} and adding the new values" |
209 | | - ) |
210 | | - shutil.copy2( |
211 | | - args.generate_clouds_yaml, |
212 | | - f"{args.generate_clouds_yaml}_{iso_timestamp()}", |
213 | | - ) |
214 | | - |
215 | | - generated_entries = len(clouds_yaml_data_new.get("clouds", [])) |
216 | | - clouds_yaml_data_new = deep_merge_dict( |
217 | | - existing_data, clouds_yaml_data_new |
218 | | - ) |
219 | | - total_entries = len(clouds_yaml_data_new.get("clouds", [])) |
220 | | - |
221 | | - with open(args.generate_clouds_yaml, "w") as file: |
222 | | - yaml.dump( |
223 | | - clouds_yaml_data_new, |
224 | | - file, |
225 | | - default_flow_style=False, |
226 | | - explicit_start=True, |
227 | | - ) |
228 | | - LOGGER.info(f"Generated {generated_entries} entries, number of entries in " |
229 | | - f"{args.generate_clouds_yaml} is now {total_entries} (old {initial_entries} entries)") |
| 230 | + generated_clouds_yaml() |
| 231 | + |
230 | 232 | sys.exit(0) |
231 | 233 | elif args.delete_projects: |
232 | 234 | conn = establish_connection() |
|
0 commit comments